Download Elastic Assets Manifest GNUPG for airgap installation
#!/bin/bash
### Simple script to download assets and manifests
### from Elastic downloads, for use as an
### internal repo for Elastic Fleet.
### Use at your own risk.
### Create this directory tree under the Nginx html root /usr/share/nginx/html
### Harisfazillah Bin Jamel 28/12/2024, update 26/03/2025
### Added Manifest and GNUPG - 20250522
### Ubuntu / Debian - apt install nginx
### Alma Linux / Rocky Linux - dnf install nginx
### Created with Google Gemini
# Define common variables
DOWNLPATH="/usr/share/nginx/html"
VERSI="9.0.0"
ENDPOINT_VERSION="$VERSI" # Use consistent versioning
LOCAL_MIRROR_URL="http://localhost" # Base URL for your local Nginx mirror
# Initialize manifest versions to empty strings for later display
REMOTE_MANIFEST_VERSION=""
LOCAL_MANIFEST_VERSION=""
# Check for distribution and install Nginx, jq, curl, wget, gnupg, and unzip
echo "Checking OS distribution and installing necessary packages (Nginx, jq, curl, wget, gnupg, unzip)..."
if [[ -f /etc/os-release ]]; then
source /etc/os-release
if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]]; then
sudo apt update
sudo apt install -y nginx jq curl wget gnupg unzip
elif [[ "$ID_LIKE" == *"rhel"* || "$ID" == "centos" || "$ID" == "rocky" || "$ID" == "almalinux" ]]; then
sudo dnf install -y nginx jq curl wget gnupg unzip
else
echo "Warning: Unrecognized distribution. Please ensure Nginx, jq, curl, wget, gnupg, and unzip are installed manually."
fi
else
echo "Warning: Could not determine OS distribution. Please ensure Nginx, jq, curl, wget, gnupg, and unzip are installed manually."
fi
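# Optional sanity check (an addition, not part of the original script): warn early
# if any of the tools this script relies on are still missing after the install step.
# It only warns, so the script's original behaviour is unchanged.
for tool in jq curl wget gpg unzip; do
  if ! command -v "$tool" &> /dev/null; then
    echo "Warning: required tool '$tool' was not found in PATH; parts of this script may fail."
  fi
done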
# Function to download assets (common for Elasticsearch, Kibana, APM Server etc.)
download_asset() {
local asset_name="$1"
local base_url="https://artifacts.elastic.co/downloads/"
local download_dir="$DOWNLPATH/downloads/$asset_name"
echo "Downloading $asset_name..."
sudo mkdir -p "$download_dir"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $download_dir. Skipping $asset_name download."; return 1; fi
sudo chmod 755 "$download_dir"
local tar_gz_file="$download_dir/$asset_name-$VERSI-linux-x86_64.tar.gz"
local sha512_file="$download_dir/$asset_name-$VERSI-linux-x86_64.tar.gz.sha512"
local asc_file="$download_dir/$asset_name-$VERSI-linux-x86_64.tar.gz.asc"
if [ -f "$tar_gz_file" ]; then echo " $asset_name-$VERSI-linux-x86_64.tar.gz already exists. Skipping download."; else sudo curl -o "$tar_gz_file" "$base_url$asset_name/$asset_name-$VERSI-linux-x86_64.tar.gz" || echo "Error downloading tar.gz for $asset_name"; fi
if [ -f "$sha512_file" ]; then echo " $sha512_file already exists. Skipping download."; else sudo curl -o "$sha512_file" "$base_url$asset_name/$asset_name-$VERSI-linux-x86_64.tar.gz.sha512" || echo "Error downloading sha512 for $asset_name"; fi
if [ -f "$asc_file" ]; then echo " $asc_file already exists. Skipping download."; else sudo curl -o "$asc_file" "$base_url$asset_name/$asset_name-$VERSI-linux-x86_64.tar.gz.asc" || echo "Error downloading asc for $asset_name"; fi
}
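# Optional verification helper (an addition, not part of the original script): a sketch
# of how a downloaded tarball could be checked against the .sha512 and .asc companions
# that download_asset fetches above. The GPG step assumes the Elastic public signing key
# has already been imported (e.g. from https://artifacts.elastic.co/GPG-KEY-elasticsearch).
verify_asset() {
  local dir="$1"   # e.g. "$DOWNLPATH/downloads/apm-server"
  local file="$2"  # e.g. "apm-server-$VERSI-linux-x86_64.tar.gz"
  ( cd "$dir" && sha512sum -c "$file.sha512" ) || echo "Checksum mismatch for $file"
  gpg --verify "$dir/$file.asc" "$dir/$file" 2>/dev/null || echo "GPG verification failed for $file (is the Elastic key imported?)"
}
# Example call (hypothetical): verify_asset "$DOWNLPATH/downloads/apm-server" "apm-server-$VERSI-linux-x86_64.tar.gz"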
# Function to download beats assets (Linux)
download_beats_asset_linux() {
local beat_name="$1"
local base_url="https://artifacts.elastic.co/downloads/beats/"
local download_dir="$DOWNLPATH/downloads/beats/$beat_name"
echo "Downloading $beat_name (Linux)..."
sudo mkdir -p "$download_dir"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $download_dir. Skipping $beat_name (Linux) download."; return 1; fi
sudo chmod 755 "$download_dir"
local tar_gz_file="$download_dir/$beat_name-$VERSI-linux-x86_64.tar.gz"
local sha512_file="$download_dir/$beat_name-$VERSI-linux-x86_64.tar.gz.sha512"
local asc_file="$download_dir/$beat_name-$VERSI-linux-x86_64.tar.gz.asc"
if [ -f "$tar_gz_file" ]; then echo " $tar_gz_file already exists. Skipping download."; else sudo curl -o "$tar_gz_file" "$base_url$beat_name/$beat_name-$VERSI-linux-x86_64.tar.gz" || echo "Error downloading tar.gz for $beat_name (Linux)"; fi
if [ -f "$sha512_file" ]; then echo " $sha512_file already exists. Skipping download."; else sudo curl -o "$sha512_file" "$base_url$beat_name/$beat_name-$VERSI-linux-x86_64.tar.gz.sha512" || echo "Error downloading sha512 for $beat_name (Linux)"; fi
if [ -f "$asc_file" ]; then echo " $asc_file already exists. Skipping download."; else sudo curl -o "$asc_file" "$base_url$beat_name/$beat_name-$VERSI-linux-x86_64.tar.gz.asc" || echo "Error downloading asc for $beat_name (Linux)"; fi
}
# Function to download beats assets (Windows)
download_beats_asset_windows() {
local beat_name="$1"
local base_url="https://artifacts.elastic.co/downloads/beats/"
local download_dir="$DOWNLPATH/downloads/beats/$beat_name"
echo "Downloading $beat_name (Windows)..."
sudo mkdir -p "$download_dir"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $download_dir. Skipping $beat_name (Windows) download."; return 1; fi
sudo chmod 755 "$download_dir"
local zip_file="$download_dir/$beat_name-$VERSI-windows-x86_64.zip"
local sha512_file="$download_dir/$beat_name-$VERSI-windows-x86_64.zip.sha512"
local asc_file="$download_dir/$beat_name-$VERSI-windows-x86_64.zip.asc"
if [ -f "$zip_file" ]; then echo " $zip_file already exists. Skipping download."; else sudo curl -o "$zip_file" "$base_url$beat_name/$beat_name-$VERSI-windows-x86_64.zip" || echo "Error downloading zip for $beat_name (Windows)"; fi
if [ -f "$sha512_file" ]; then echo " $sha512_file already exists. Skipping download."; else sudo curl -o "$sha512_file" "$base_url$beat_name/$beat_name-$VERSI-windows-x86_64.zip.sha512" || echo "Error downloading sha512 for $beat_name (Windows)"; fi
if [ -f "$asc_file" ]; then echo " $asc_file already exists. Skipping download."; else sudo curl -o "$asc_file" "$base_url$beat_name/$beat_name-$VERSI-windows-x86_64.zip.asc" || echo "Error downloading asc for $beat_name (Windows)"; fi
}
# Function to download the Endpoint Security offline package (Linux x86_64)
download_endpoint_offline_package() {
local base_url="https://artifacts.elastic.co/downloads/endpoint/packages/"
local download_dir="$DOWNLPATH/downloads/endpoint/offline_package"
echo "Downloading Endpoint Security offline package (Linux x86_64)..."
sudo mkdir -p "$download_dir"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $download_dir. Skipping Endpoint offline package download."; return 1; fi
sudo chmod 755 "$download_dir"
local zip_file="$download_dir/endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip"
local sha512_file="$download_dir/endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip.sha512"
local asc_file="$download_dir/endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip.asc"
if [ -f "$zip_file" ]; then echo " $zip_file already exists. Skipping download."; else sudo curl -o "$zip_file" "${base_url}endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip" || echo "Error downloading offline package zip"; fi
if [ -f "$sha512_file" ]; then echo " $sha512_file already exists. Skipping download."; else sudo curl -o "$sha512_file" "${base_url}endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip.sha512" || echo "Error downloading offline package sha512"; fi
if [ -f "$asc_file" ]; then echo " $asc_file already exists. Skipping download."; else sudo curl -o "$asc_file" "${base_url}endpoint-bundle-offline-linux-x86_64-$ENDPOINT_VERSION.zip.asc" || echo "Error downloading offline package asc"; fi
}
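# Optional integrity check (an addition, not part of the original script): once the
# zip assets have been downloaded further below, "unzip -t" can confirm each archive
# is readable. This sketch assumes the $DOWNLPATH/downloads layout created by the
# functions above; call it after the main download process if desired.
check_zip_integrity() {
  sudo find "$DOWNLPATH/downloads" -name '*.zip' -print0 | \
    while IFS= read -r -d '' zip; do
      sudo unzip -tq "$zip" > /dev/null || echo "Warning: $zip failed the integrity test"
    done
}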
# Function to download the Kibana plugin for Endpoint Security
download_kibana_plugin() {
local base_url="https://artifacts.elastic.co/downloads/security/"
local download_dir="$DOWNLPATH/downloads/kibana_plugin"
local kibana_version_suffix="-8.17.3" # NOTE: this suffix is hardcoded and does not track $VERSI. Adjust it to match your Kibana version.
echo "Downloading Kibana plugin for Endpoint Security..."
sudo mkdir -p "$download_dir"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $download_dir. Skipping Kibana plugin download."; return 1; fi
sudo chmod 755 "$download_dir"
local zip_file="$download_dir/security-$VERSI$kibana_version_suffix.zip"
if [ -f "$zip_file" ]; then echo " $zip_file already exists. Skipping download."; else sudo curl -o "$zip_file" "${base_url}security-$VERSI$kibana_version_suffix.zip" || echo "Error downloading Kibana plugin zip"; fi
}
# Function to download the Fleet Server PGP key
download_fleet_server_pgp_key() {
local target_dir="$DOWNLPATH/downloads/fleet-server/elastic-agent-upgrade-keys"
local pgp_key_filename="default.pgp"
local pgp_key_url="https://artifacts.elastic.co/GPG-KEY-elastic-agent"
echo "Downloading Fleet Server PGP key to $target_dir/$pgp_key_filename..."
# Create directory if it doesn't exist
sudo mkdir -p "$target_dir"
if [ $? -ne 0 ]; then
echo "Error: Failed to create directory $target_dir. Skipping PGP key download."
return 1
fi
sudo chmod 755 "$target_dir" # Ensure directory permissions are set
# Download the PGP key
local pgp_file_path="$target_dir/$pgp_key_filename"
if [ -f "$pgp_file_path" ]; then
echo " $pgp_key_filename already exists. Skipping download."
else
sudo curl -o "$pgp_file_path" "$pgp_key_url"
if [ $? -ne 0 ]; then
echo "Error: Failed to download PGP key from $pgp_key_url."
return 1
fi
fi
sudo chmod 644 "$pgp_file_path" # Set permissions for the downloaded file
echo "Fleet Server PGP key downloaded successfully to $pgp_file_path."
return 0
}
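# Usage note (an addition, not part of the original script): once Nginx is configured
# further below, the key placed by download_fleet_server_pgp_key is reachable through
# the upgrade endpoint that the Nginx location block maps onto default.pgp. The version
# in the URL is arbitrary; any x.y.z matches the location regex. Example:
#   curl -s "$LOCAL_MIRROR_URL/api/agents/upgrades/9.0.0/pgp-public-key" | head -n 2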
# --- Main Download Process ---
echo "Starting main download process..."
# Download APM Server
download_asset apm-server
# Download Auditbeat (Linux)
download_beats_asset_linux auditbeat
# Download Elastic Agent (Linux)
download_beats_asset_linux elastic-agent
# Download Elastic Agent (Windows)
download_beats_asset_windows elastic-agent
# Download Filebeat (Linux)
download_beats_asset_linux filebeat
# Download Heartbeat (Linux)
download_beats_asset_linux heartbeat
# Download Metricbeat (Linux)
download_beats_asset_linux metricbeat
# Download Osquerybeat (Linux)
download_beats_asset_linux osquerybeat
# Download Packetbeat (Linux)
download_beats_asset_linux packetbeat
# Download Cloudbeat
download_asset cloudbeat
# Download Endpoint Security (single tar.gz)
download_asset endpoint-dev
# --- Endpoint Security Manifest and Granular Artifacts Download with Version Check ---
echo "Processing Endpoint Security manifest and granular artifacts..."
ENDPOINT_DOWNLOAD_PATH="$DOWNLPATH/downloads/endpoint"
MANIFEST_DIR="$ENDPOINT_DOWNLOAD_PATH/manifest"
MANIFEST_ZIP_FILENAME="artifacts-$ENDPOINT_VERSION.zip"
REMOTE_MANIFEST_URL="https://artifacts.security.elastic.co/downloads/endpoint/manifest/$MANIFEST_ZIP_FILENAME"
LOCAL_MANIFEST_URL="$LOCAL_MIRROR_URL/downloads/endpoint/manifest/$MANIFEST_ZIP_FILENAME"
LOCAL_MANIFEST_PATH="$MANIFEST_DIR/$MANIFEST_ZIP_FILENAME"
sudo mkdir -p "$MANIFEST_DIR"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $MANIFEST_DIR. Cannot continue with granular Endpoint Security download. Exiting."; exit 1; fi
sudo chmod 755 "$ENDPOINT_DOWNLOAD_PATH" "$MANIFEST_DIR"
if command -v curl &> /dev/null && command -v unzip &> /dev/null && command -v jq &> /dev/null && command -v wget &> /dev/null && command -v xargs &> /dev/null; then
# Get remote manifest version
echo "Checking remote Endpoint Security manifest version from $REMOTE_MANIFEST_URL..."
TEMP_REMOTE_ZIP="/tmp/remote_manifest_$$.zip" # Temporary file for remote zip
curl -s "$REMOTE_MANIFEST_URL" -o "$TEMP_REMOTE_ZIP"
CURL_STATUS=$?
REMOTE_MANIFEST_VERSION="" # Reset for this check
if [[ "$CURL_STATUS" -ne 0 ]]; then
echo "Warning: curl failed to download remote manifest (status: $CURL_STATUS)."
echo " Raw content (if any): $(cat "$TEMP_REMOTE_ZIP" 2>/dev/null | head -c 200) (truncated)"
elif [ ! -f "$TEMP_REMOTE_ZIP" ]; then
echo "Warning: Remote manifest zip file was not created."
else
REMOTE_MANIFEST_VERSION=$(unzip -p "$TEMP_REMOTE_ZIP" manifest.json 2>/dev/null | jq -r .manifest_version 2>/dev/null)
UNZIP_JQ_STATUS=$?
if [[ "$UNZIP_JQ_STATUS" -ne 0 || -z "$REMOTE_MANIFEST_VERSION" ]]; then
echo "Warning: Could not extract version from remote manifest zip."
echo " unzip/jq exit status: $UNZIP_JQ_STATUS"
echo " Content of manifest.json from zip (if any): $(unzip -p "$TEMP_REMOTE_ZIP" manifest.json 2>/dev/null | head -c 200) (truncated)"
fi
fi
rm -f "$TEMP_REMOTE_ZIP" # Clean up temporary file
# Get local manifest version
echo "Checking local Endpoint Security manifest version from $LOCAL_MIRROR_URL/downloads/endpoint/manifest/$MANIFEST_ZIP_FILENAME..."
LOCAL_MANIFEST_VERSION="" # Reset for this check
TEMP_LOCAL_CURL_ZIP="/tmp/local_curl_manifest_$$.zip" # Temporary file for curl from localhost
curl -s "$LOCAL_MIRROR_URL/downloads/endpoint/manifest/$MANIFEST_ZIP_FILENAME" -o "$TEMP_LOCAL_CURL_ZIP"
CURL_LOCAL_STATUS=$?
if [[ "$CURL_LOCAL_STATUS" -eq 0 && -s "$TEMP_LOCAL_CURL_ZIP" ]]; then # -s checks if file is not empty
echo " Successfully fetched content from local Nginx."
LOCAL_MANIFEST_VERSION=$(unzip -p "$TEMP_LOCAL_CURL_ZIP" manifest.json 2>/dev/null | jq -r .manifest_version 2>/dev/null)
if [[ -z "$LOCAL_MANIFEST_VERSION" ]]; then
echo " Warning: Could not extract version from content fetched via Nginx. Content might be invalid or not a zip."
echo " Content (truncated): $(cat "$TEMP_LOCAL_CURL_ZIP" | head -c 200)"
fi
else
echo " Local manifest not yet served by Nginx or curl failed (status: $CURL_LOCAL_STATUS)."
echo " Attempting to read directly from local file: $LOCAL_MANIFEST_PATH."
if [ -f "$LOCAL_MANIFEST_PATH" ]; then
echo " Reading directly from local file: $LOCAL_MANIFEST_PATH"
LOCAL_MANIFEST_VERSION=$(sudo unzip -p "$LOCAL_MANIFEST_PATH" manifest.json 2>/dev/null | jq -r .manifest_version 2>/dev/null)
if [[ -z "$LOCAL_MANIFEST_VERSION" ]]; then
echo " Warning: Could not extract version from local file directly. File might be invalid or not a zip."
echo " File content (truncated): $(sudo head -c 200 "$LOCAL_MANIFEST_PATH")"
fi
else
echo " Local manifest file not found at $LOCAL_MANIFEST_PATH."
fi
fi
rm -f "$TEMP_LOCAL_CURL_ZIP" # Clean up temporary file
if [[ -z "$LOCAL_MANIFEST_VERSION" ]]; then
echo "Local Endpoint Security manifest version: NOT FOUND"
else
echo "Local Endpoint Security manifest version: $LOCAL_MANIFEST_VERSION"
fi
# Compare versions and download if necessary
DOWNLOAD_REQUIRED="false"
if [[ -z "$LOCAL_MANIFEST_VERSION" ]]; then
echo "Local manifest not found. Download is required."
DOWNLOAD_REQUIRED="true"
elif [[ -n "$REMOTE_MANIFEST_VERSION" && "$REMOTE_MANIFEST_VERSION" != "$LOCAL_MANIFEST_VERSION" ]]; then
echo "Remote manifest version ($REMOTE_MANIFEST_VERSION) is different from local ($LOCAL_MANIFEST_VERSION). Download is required."
DOWNLOAD_REQUIRED="true"
else
echo "Local manifest is up-to-date ($LOCAL_MANIFEST_VERSION). Skipping download of manifest and granular artifacts."
fi
if [[ "$DOWNLOAD_REQUIRED" == "true" ]]; then
echo "Downloading latest Endpoint Security manifest from $REMOTE_MANIFEST_URL..."
sudo wget -q "$REMOTE_MANIFEST_URL" -O "$LOCAL_MANIFEST_PATH"
if [[ "$?" -ne 0 ]]; then
echo "Error: Could not download Endpoint Security manifest from remote. Granular artifacts will not be downloaded."
else
echo "Manifest downloaded successfully to $LOCAL_MANIFEST_PATH."
echo "Verifying newly downloaded manifest version:"
NEWLY_DOWNLOADED_VERSION=$(sudo unzip -p "$LOCAL_MANIFEST_PATH" manifest.json 2>/dev/null | jq -r .manifest_version 2>/dev/null)
if [[ -z "$NEWLY_DOWNLOADED_VERSION" ]]; then
echo "Warning: Could not read version from newly downloaded manifest. File might be corrupt."
else
echo "Newly downloaded manifest version: $NEWLY_DOWNLOADED_VERSION"
fi
echo "Extracting and downloading granular artifacts..."
# Using a subshell for cd to avoid affecting the main script's working directory
( cd "$MANIFEST_DIR" && \
sudo unzip -p "$MANIFEST_ZIP_FILENAME" manifest.json | \
sudo jq -r '.artifacts | to_entries[] | .value.relative_url' | \
sudo xargs -I@ -P4 curl -s "https://artifacts.security.elastic.co@" --create-dirs -o "$ENDPOINT_DOWNLOAD_PATH/@" )
XARGS_STATUS=$?
if [[ "$XARGS_STATUS" -ne 0 ]]; then
echo "Error: Failed to download some granular artifacts (xargs exit status: $XARGS_STATUS)."
else
echo "Finished downloading granular Endpoint Security artifacts to $ENDPOINT_DOWNLOAD_PATH"
fi
fi
fi
else
echo "Warning: Some dependencies are missing for Endpoint Security manifest check and granular download (curl, unzip, jq, wget, xargs). Skipping this section."
fi
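# Optional inspection (an addition, not part of the original script): list a sample of
# the artifact URLs referenced by the local manifest, using the same unzip/jq pipeline
# as the granular download above. Useful for spot-checking the mirror contents before
# pointing agents at it.
if [ -f "$LOCAL_MANIFEST_PATH" ]; then
  echo "Sample of artifact URLs referenced by the local manifest:"
  sudo unzip -p "$LOCAL_MANIFEST_PATH" manifest.json 2>/dev/null | \
    jq -r '.artifacts | to_entries[] | .value.relative_url' 2>/dev/null | head -n 5
fi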
# Download Endpoint Security Offline Package
download_endpoint_offline_package
# Download Kibana Plugin for Endpoint Security
download_kibana_plugin
# Download Fleet Server
download_asset fleet-server
# Download Prodfiler
echo "Downloading Prodfiler assets..."
PRODFILER_DOWNLOAD_DIR="$DOWNLPATH/downloads/prodfiler"
sudo mkdir -p "$PRODFILER_DOWNLOAD_DIR"
if [ $? -ne 0 ]; then echo "Error: Failed to create directory $PRODFILER_DOWNLOAD_DIR. Skipping Prodfiler download."; exit 1; fi
sudo chmod 755 "$PRODFILER_DOWNLOAD_DIR"
PRODFILER_BASE_URL="https://artifacts.elastic.co/downloads/prodfiler/"
# pf-host-agent
PF_HOST_AGENT_TAR="$PRODFILER_DOWNLOAD_DIR/pf-host-agent-$VERSI-linux-x86_64.tar.gz"
PF_HOST_AGENT_SHA="$PRODFILER_DOWNLOAD_DIR/pf-host-agent-$VERSI-linux-x86_64.tar.gz.sha512"
PF_HOST_AGENT_ASC="$PRODFILER_DOWNLOAD_DIR/pf-host-agent-$VERSI-linux-x86_64.tar.gz.asc"
if [ -f "$PF_HOST_AGENT_TAR" ]; then echo " $PF_HOST_AGENT_TAR already exists. Skipping download."; else sudo curl -o "$PF_HOST_AGENT_TAR" "${PRODFILER_BASE_URL}pf-host-agent-$VERSI-linux-x86_64.tar.gz" || echo "Error downloading pf-host-agent"; fi
if [ -f "$PF_HOST_AGENT_SHA" ]; then echo " $PF_HOST_AGENT_SHA already exists. Skipping download."; else sudo curl -o "$PF_HOST_AGENT_SHA" "${PRODFILER_BASE_URL}pf-host-agent-$VERSI-linux-x86_64.tar.gz.sha512" || echo "Error downloading pf-host-agent sha512"; fi
if [ -f "$PF_HOST_AGENT_ASC" ]; then echo " $PF_HOST_AGENT_ASC already exists. Skipping download."; else sudo curl -o "$PF_HOST_AGENT_ASC" "${PRODFILER_BASE_URL}pf-host-agent-$VERSI-linux-x86_64.tar.gz.asc" || echo "Error downloading pf-host-agent asc"; fi
# pf-elastic-collector
PF_COLLECTOR_TAR="$PRODFILER_DOWNLOAD_DIR/pf-elastic-collector-$VERSI-linux-x86_64.tar.gz"
PF_COLLECTOR_SHA="$PRODFILER_DOWNLOAD_DIR/pf-elastic-collector-$VERSI-linux-x86_64.tar.gz.sha512"
PF_COLLECTOR_ASC="$PRODFILER_DOWNLOAD_DIR/pf-elastic-collector-$VERSI-linux-x86_64.tar.gz.asc"
if [ -f "$PF_COLLECTOR_TAR" ]; then echo " $PF_COLLECTOR_TAR already exists. Skipping download."; else sudo curl -o "$PF_COLLECTOR_TAR" "${PRODFILER_BASE_URL}pf-elastic-collector-$VERSI-linux-x86_64.tar.gz" || echo "Error downloading pf-elastic-collector"; fi
if [ -f "$PF_COLLECTOR_SHA" ]; then echo " $PF_COLLECTOR_SHA already exists. Skipping download."; else sudo curl -o "$PF_COLLECTOR_SHA" "${PRODFILER_BASE_URL}pf-elastic-collector-$VERSI-linux-x86_64.tar.gz.sha512" || echo "Error downloading pf-elastic-collector sha512"; fi
if [ -f "$PF_COLLECTOR_ASC" ]; then echo " $PF_COLLECTOR_ASC already exists. Skipping download."; else sudo curl -o "$PF_COLLECTOR_ASC" "${PRODFILER_BASE_URL}pf-elastic-collector-$VERSI-linux-x86_64.tar.gz.asc" || echo "Error downloading pf-elastic-collector asc"; fi
# pf-elastic-symbolizer
PF_SYMBOLIZER_TAR="$PRODFILER_DOWNLOAD_DIR/pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz"
PF_SYMBOLIZER_SHA="$PRODFILER_DOWNLOAD_DIR/pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz.sha512"
PF_SYMBOLIZER_ASC="$PRODFILER_DOWNLOAD_DIR/pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz.asc"
if [ -f "$PF_SYMBOLIZER_TAR" ]; then echo " $PF_SYMBOLIZER_TAR already exists. Skipping download."; else sudo curl -o "$PF_SYMBOLIZER_TAR" "${PRODFILER_BASE_URL}pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz" || echo "Error downloading pf-elastic-symbolizer"; fi
if [ -f "$PF_SYMBOLIZER_SHA" ]; then echo " $PF_SYMBOLIZER_SHA already exists. Skipping download."; else sudo curl -o "$PF_SYMBOLIZER_SHA" "${PRODFILER_BASE_URL}pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz.sha512" || echo "Error downloading pf-elastic-symbolizer sha512"; fi
if [ -f "$PF_SYMBOLIZER_ASC" ]; then echo " $PF_SYMBOLIZER_ASC already exists. Skipping download."; else sudo curl -o "$PF_SYMBOLIZER_ASC" "${PRODFILER_BASE_URL}pf-elastic-symbolizer-$VERSI-linux-x86_64.tar.gz.asc" || echo "Error downloading pf-elastic-symbolizer asc"; fi
# Download the specific Fleet Server PGP key to the specified directory
download_fleet_server_pgp_key
# --- Final File and Directory Permissions ---
echo "Setting final file and directory permissions..."
# Use sudo with find -exec for permissions
sudo find "$DOWNLPATH/downloads" -type f -exec chmod 644 {} \;
sudo find "$DOWNLPATH/downloads" -type d -exec chmod 755 {} \;
echo "Permissions set for all downloaded files and directories."
# --- Add Nginx Configuration ---
echo "Adding Nginx configuration..."
NGINX_CONF_DIR="/etc/nginx/conf.d"
ELASTIC_ASSETS_CONF_FILE="$NGINX_CONF_DIR/elastic_assets.conf"
NGINX_MAIN_CONF="/etc/nginx/nginx.conf"
ELASTIC_ASSETS_CONFIG="# set compatible etag format
map \$sent_http_etag \$elastic_etag {
\"~(.*)-(.*)\" \"\$1\$2\";
}
server {
listen 80 reuseport;
server_name _ default_server; # You might want to adjust this
root $DOWNLPATH; # Root should be /usr/share/nginx/html to serve /downloads/
location /downloads/ { # This location block is crucial for general downloads
alias $DOWNLPATH/downloads/; # Maps the /downloads/ URL to the actual directory
try_files \$uri \$uri/ =404;
add_header ETag \"\$elastic_etag\";
}
# Add a location block for the Fleet Server PGP key
# The Fleet Server endpoint is GET /api/agents/upgrades/{major}.{minor}.{patch}/pgp-public-key
# This mapping serves default.pgp for any version request matching the pattern
location ~ ^/api/agents/upgrades/([0-9]+\.[0-9]+\.[0-9]+)/pgp-public-key {
alias $DOWNLPATH/downloads/fleet-server/elastic-agent-upgrade-keys/default.pgp;
default_type text/plain; # or application/pgp-keys
add_header ETag \"\$elastic_etag\";
}
# Optional: Default Nginx root for other requests if needed
location / {
# This can be your default Nginx welcome page or another application
root /usr/share/nginx/html;
index index.html index.htm;
try_files \$uri \$uri/ =404;
}
# favicon.ico
location = /favicon.ico {
log_not_found off;
}
# robots.txt
location = /robots.txt {
log_not_found off;
}
}"
# Check if the conf.d directory exists
if [[ -d "$NGINX_CONF_DIR" ]]; then
echo "Creating Nginx configuration file: $ELASTIC_ASSETS_CONF_FILE"
echo "$ELASTIC_ASSETS_CONFIG" | sudo tee "$ELASTIC_ASSETS_CONF_FILE" > /dev/null
else
echo "Warning: Directory $NGINX_CONF_DIR not found. Appending to main Nginx configuration: $NGINX_MAIN_CONF"
echo "$ELASTIC_ASSETS_CONFIG" | sudo tee -a "$NGINX_MAIN_CONF" > /dev/null
fi
# Test Nginx configuration before reloading
echo "Testing Nginx configuration..."
sudo nginx -t
if [[ "$?" -ne 0 ]]; then
echo "Error: Nginx configuration test failed. Please check the syntax above. Exiting."
exit 1
fi
echo "Nginx configuration test successful."
# Reload Nginx to apply changes
if command -v systemctl &> /dev/null; then
sudo systemctl reload nginx
if [[ "$?" -eq 0 ]]; then
echo "Nginx reloaded successfully."
else
echo "Error reloading Nginx."
fi
elif command -v service &> /dev/null; then
sudo service nginx reload
if [[ "$?" -eq 0 ]]; then
echo "Nginx reloaded successfully."
else
echo "Error reloading Nginx."
fi
else
echo "Warning: Could not find systemctl or service command to reload Nginx. Please reload Nginx manually."
fi
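# Optional smoke test (an addition, not part of the original script): confirm the mirror
# answers over HTTP once Nginx has reloaded. Both URLs follow the layout and location
# blocks configured above; adjust LOCAL_MIRROR_URL if Nginx listens elsewhere. The test
# only prints the HTTP status codes, it does not change anything.
echo "Smoke-testing local mirror endpoints..."
curl -s -o /dev/null -w " manifest zip -> HTTP %{http_code}\n" "$LOCAL_MIRROR_URL/downloads/endpoint/manifest/$MANIFEST_ZIP_FILENAME"
curl -s -o /dev/null -w " pgp-public-key -> HTTP %{http_code}\n" "$LOCAL_MIRROR_URL/api/agents/upgrades/$VERSI/pgp-public-key"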
echo "" | |
echo "-------------------------------------------------------------------------" | |
echo "Air-Gapped Installation Considerations:" | |
echo "-------------------------------------------------------------------------" | |
echo "1. Configure Elastic Agents to point to this internal artifact server." | |
echo " For binaries, set Fleet's 'Agent binary download URL' in Kibana to:" | |
echo " http://your_nginx_ip/downloads/beats/elastic-agent/" | |
echo " For PGP key verification during upgrades from Fleet Server, ensure your Fleet Server" | |
echo " is configured to check its local 'elastic-agent-upgrade-keys/default.pgp' file." | |
echo " (This script handles placing 'default.pgp' correctly)." | |
echo "2. Configure Fleet Server to be aware of this internal artifact server." | |
echo " If using server.pgp.upstream_url in Fleet Server config, ensure it's empty or points" | |
echo " to your internal Nginx if you choose to serve it this way, otherwise" | |
echo " the locally placed 'default.pgp' will be used." | |
echo "3. In Fleet, configure Endpoint Security policies to use the offline" | |
echo " package hosted on this internal server for updates. This might involve" | |
echo " setting appropriate URLs in Kibana's Fleet settings." | |
echo "4. Install the downloaded Kibana plugin for Endpoint Security in your Kibana instance." | |
echo "-------------------------------------------------------------------------" | |
echo "Artifacts have been downloaded to: $DOWNLPATH/downloads" | |
echo "You can access them via your Nginx server (e.g., http://your_nginx_ip/downloads/)." | |
echo "The Endpoint Security manifest is served via: $LOCAL_MIRROR_URL/downloads/endpoint/manifest/artifacts-$ENDPOINT_VERSION.zip" | |
echo "The Fleet Server PGP key is served via: $LOCAL_MIRROR_URL/api/agents/upgrades/X.x.x/pgp-public-key" # Updated to X.x.x | |
echo "-------------------------------------------------------------------------" | |
# --- Final Verification Summary ---
echo ""
echo "-------------------------------------------------------------------------"
echo "Final Verification Summary:"
echo "-------------------------------------------------------------------------"
echo "Binary Version (VERSI): $VERSI"
echo "Remote Endpoint Security Manifest Version: ${REMOTE_MANIFEST_VERSION:-N/A (check logs above)}"
echo "Local Endpoint Security Manifest Version: ${LOCAL_MANIFEST_VERSION:-N/A (check logs above)}"
echo "-------------------------------------------------------------------------"
exit
See also: a companion script that downloads the manifest only: https://gist.github.com/linuxmalaysia/2ebf9814836736cccd87a2694886137f