Compare commits

...

4 Commits

SHA1 Message Date
57ca074d0e Update devbox/cli/devbox 2025-07-15 01:42:56 +00:00
d7aec36cf2 Update devbox/cli/devbox 2025-07-15 00:59:23 +00:00
38f5671cd0 Update devbox/cli/devbox 2025-07-14 16:11:48 +00:00
79d3d226be Update devbox/cli/devbox 2025-07-14 15:51:54 +00:00


@@ -640,6 +640,38 @@ init_compile_env() {
# Update for export environments []
docker exec -i "$DEVBOX_NAME" bash <<EOF
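# Note: this heredoc delimiter is unquoted, so expansions that are not escaped with
# a backslash are resolved on the host while the heredoc is built; backslash-escaped
# ones are expanded by the shell running inside the container.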
# Detect the network environment
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Checking network connectivity..."
# Check if you can access Google
if ping -c 1 -W 5 google.com > /dev/null 2>&1; then
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Google is accessible, using default Ubuntu sources"
# Use the default Ubuntu source without any modifications
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Keeping default Ubuntu sources"
else
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Google is not accessible, switching to Aliyun mirrors"
# Back up the original source list
cp /etc/apt/sources.list /etc/apt/sources.list.backup
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Backup original sources.list to sources.list.backup"
# Replace with an Alibaba Cloud mirror source
cat > /etc/apt/sources.list << 'SOURCES_EOF'
deb http://mirrors.aliyun.com/ubuntu/ jammy main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ jammy-security main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ jammy-updates main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ jammy-backports main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ jammy main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ jammy-security main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ jammy-updates main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ jammy-backports main restricted universe multiverse
SOURCES_EOF
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Successfully replaced with Aliyun mirrors"
fi
# Update package list
echo "[INIT] $(date '+%Y-%m-%d %H:%M:%S') Updating package lists..."
apt-get update
echo "[INIT] \$(date '+%Y-%m-%d %H:%M:%S') Starting DevBox initialization..."
# Export environment variables
@@ -916,6 +948,146 @@ compile_backend_service() {
echo "[BACKEND] $(date '+%Y-%m-%d %H:%M:%S') Start backend service from $DEVBOX_NAME."
docker exec -i "$DEVBOX_NAME" bash <<EOF
# Define the install_and_verify_packages function inside the container
install_and_verify_packages() {
local requirements_file="\$1"
if [[ -z "\$requirements_file" ]]; then
echo "[ERROR] Requirements file path is required" >&2
return 1
fi
if [[ ! -f "\$requirements_file" ]]; then
echo "[ERROR] Requirements file '\$requirements_file' does not exist" >&2
return 1
fi
if [[ ! -r "\$requirements_file" ]]; then
echo "[ERROR] Requirements file '\$requirements_file' is not readable" >&2
return 1
fi
echo "[INFO] Starting package installation and verification for: \$requirements_file"
# Create temporary directory for hash verification
local temp_dir=\$(mktemp -d)
local hash_file="\$temp_dir/package_hashes.txt"
touch "\$hash_file"  # ensure the file exists even if no hashes are collected below
# Function to get package hash from PyPI
get_package_hash() {
local package_name="\$1"
local version="\$2"
local pypi_url="https://pypi.org/pypi/\${package_name}/\${version}/json"
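# The digest extracted below is the sha256 of the first release file (sdist or wheel)
# listed in PyPI's JSON metadata for this exact version.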
local hash_value=""
if command -v curl &>/dev/null; then
hash_value=\$(curl -s "\$pypi_url" | grep -o '"sha256":"[^"]*"' | head -1 | cut -d'"' -f4 2>/dev/null || echo "")
elif command -v wget &>/dev/null; then
hash_value=\$(wget -qO- "\$pypi_url" | grep -o '"sha256":"[^"]*"' | head -1 | cut -d'"' -f4 2>/dev/null || echo "")
fi
echo "\$hash_value"
}
# Function to verify installed package
verify_package() {
local package_name="\$1"
local expected_hash="\$2"
if [[ -z "\$expected_hash" ]]; then
echo "[INFO] No hash available for \$package_name, skipping verification"
return 0
fi
local package_location=""
if command -v python3.11 &>/dev/null; then
package_location=\$(python3.11 -c "import \$package_name; print(\$package_name.__file__)" 2>/dev/null || echo "")
fi
if [[ -z "\$package_location" ]]; then
echo "[ERROR] Package \$package_name not found, verification failed" >&2
return 1
fi
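# Best-effort check: hash the first installed file whose name contains the package
# name and compare it with the release-file digest fetched from PyPI.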
local actual_hash=\$(find "\$(dirname "\$package_location")" -name "*\$package_name*" -type f -exec sha256sum {} \; 2>/dev/null | head -1 | cut -d' ' -f1)
if [[ "\$actual_hash" == "\$expected_hash" ]]; then
echo "[INFO] Package \$package_name verification successful"
return 0
else
echo "[ERROR] Package \$package_name verification failed - Expected: \$expected_hash, Got: \$actual_hash" >&2
return 1
fi
}
# Step 1: Install all packages
echo "[INFO] Installing all packages from \$requirements_file"
if ! pip install --no-cache-dir -r "\$requirements_file"; then
echo "[ERROR] Failed to install packages" >&2
rm -rf "\$temp_dir"
return 1
fi
# Step 2: Collect hashes for verification
echo "[INFO] Collecting package hashes for verification"
while IFS= read -r line; do
[[ "\$line" =~ ^[[:space:]]*# ]] && continue
[[ -z "\$line" ]] && continue
local package_name=\$(echo "\$line" | cut -d'=' -f1 | cut -d'[' -f1 | tr -d ' ')
local version=\$(echo "\$line" | grep -o '[=<>!][=<>!]*[^;]*' | head -1 | sed 's/[=<>!]*//' || echo "")
if [[ -n "\$package_name" ]]; then
echo "[INFO] Getting hash for \$package_name\${version:+:\$version}"
local hash_value=\$(get_package_hash "\$package_name" "\$version")
if [[ -n "\$hash_value" ]]; then
echo "\$package_name:\$hash_value" >> "\$hash_file"
fi
fi
done < "\$requirements_file"
# Step 3: Verify each package and reinstall if failed
echo "[INFO] Verifying installed packages"
local failed_packages=()
while IFS=: read -r package_name hash_value; do
if [[ -n "\$package_name" && -n "\$hash_value" ]]; then
if ! verify_package "\$package_name" "\$hash_value"; then
failed_packages+=("\$package_name")
echo "[WARN] Package \$package_name failed verification, will reinstall"
fi
fi
done < "\$hash_file"
# Reinstall failed packages
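# --force-reinstall makes pip reinstall the package even when the requirement is
# already satisfied, pulling a fresh copy from the index.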
if [[ \${#failed_packages[@]} -gt 0 ]]; then
echo "[INFO] Reinstalling \${#failed_packages[@]} failed packages"
for package in "\${failed_packages[@]}"; do
echo "[INFO] Reinstalling \$package"
if ! pip install --no-cache-dir --force-reinstall "\$package"; then
echo "[ERROR] Failed to reinstall \$package" >&2
rm -rf "\$temp_dir"
return 1
fi
# Verify again after reinstall
local package_hash=\$(grep "^\$package:" "\$hash_file" | cut -d: -f2)
if ! verify_package "\$package" "\$package_hash"; then
echo "[ERROR] Package \$package still failed verification after reinstall" >&2
rm -rf "\$temp_dir"
return 1
fi
done
fi
echo "[INFO] All packages installed and verified successfully"
rm -rf "\$temp_dir"
return 0
}
# Check if /home/devbox/.backend.pid exists
if [ -f /home/devbox/.backend.pid ]; then
backend_pid=\$(cat /home/devbox/.backend.pid)
@@ -988,7 +1160,7 @@ compile_backend_service() {
# Check if it's the first time by verifying if the backend dependencies have been installed
if [ ! -f "/home/devbox/.backend_deps_installed" ]; then
echo "[BACKEND] \$(date '+%Y-%m-%d %H:%M:%S') Install backend dependencies..."
pip install -r /home/devbox/freeleaps/apps/freeleaps/requirements.txt
install_and_verify_packages /home/devbox/freeleaps/apps/freeleaps/requirements.txt
if ! pip show async_timeout; then
echo "[BACKEND] \$(date '+%Y-%m-%d %H:%M:%S') async_timeout is missing. Installing..."
pip install async_timeout
@@ -1034,7 +1206,7 @@ compile_backend_service() {
if [ \$IS_NEW_REQ_ADDED -eq 1 ]; then
echo "[BACKEND] \$(date '+%Y-%m-%d %H:%M:%S') Reinstalling dependencies..."
pip install -r /home/devbox/freeleaps/apps/freeleaps/requirements.txt
install_and_verify_packages /home/devbox/freeleaps/apps/freeleaps/requirements.txt
fi
# Undo update for /home/devbox/freeleaps/apps/requirements.txt
rm /home/devbox/freeleaps/apps/freeleaps/requirements.txt
@@ -1048,7 +1220,7 @@ compile_backend_service() {
# Check if all dependencies are installed, if not, install them
if ! pip check; then
echo "[BACKEND] \$(date '+%Y-%m-%d %H:%M:%S') Some dependencies are missing. Reinstalling..."
pip install -r /home/devbox/freeleaps/apps/freeleaps/requirements.txt
install_and_verify_packages /home/devbox/freeleaps/apps/freeleaps/requirements.txt
fi
# pip install async_timeout if not installed
@@ -1094,7 +1266,7 @@ compile_backend_service() {
if [ \$IS_NEW_REQ_ADDED -eq 1 ]; then
echo "[BACKEND] \$(date '+%Y-%m-%d %H:%M:%S') Reinstalling dependencies..."
pip install -r /home/devbox/freeleaps/apps/requirements.txt
install_and_verify_packages /home/devbox/freeleaps/apps/requirements.txt
fi
# Undo update for /home/devbox/freeleaps/apps/requirements.txt
@@ -3582,7 +3754,9 @@ devbox_init_parse_requirements() {
add_arg '--devbox-image-name' "devbox_v1"
fi
if [ -z "$(get_arg '--devbox-image-tag')" ]; then
if [ "$current_arch" = "amd64" ]; then
add_arg '--devbox-image-tag' "devbox_local_amd64"
else
add_arg '--devbox-image-tag' "devbox_local"
fi