- Copy/download `get_cloud.py` to `/usr/local/bin` on the Proxmox node.
- Change the common variables inside the script as appropriate.
- Make the script executable: `chmod +x /usr/local/bin/get_cloud.py`
- Copy and edit the `xxx.service` and `xxx.timer` files into `/etc/systemd/system`. Adjust the contents as appropriate (`xxx` can be any string, provided that the name of the systemd unit (`xxx.service`) and of the timer (`xxx.timer`) are the same; the files below are provided as an example only).
- Reload systemd and enable the timer: `systemctl daemon-reload`, then `systemctl enable xxx.timer && systemctl start xxx.timer`
- To download and import the cloud image right away, run `systemctl start xxx.service`. You don't have to wait for the service to finish and can back out immediately by pressing `Ctrl+C`.
- To view the output of the service, run `journalctl -u xxx.service`
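Putting the steps above together, a typical setup sequence looks like this (a sketch only, using the placeholder unit name xxx and assuming the files sit in the current directory):

cp get_cloud.py /usr/local/bin/get_cloud.py
chmod +x /usr/local/bin/get_cloud.py
cp xxx.service xxx.timer /etc/systemd/system/
# edit the script's common variables and the unit contents before enabling
systemctl daemon-reload
systemctl enable xxx.timer && systemctl start xxx.timer
systemctl start xxx.service   # optional: download and import right away
journalctl -u xxx.service     # view the output of the service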
Download and import cloud images to existing Proxmox VM/templates
get_cloud.py:
#!/usr/bin/env -S python3 -u

import hashlib
import os
import argparse
import requests
from pathlib import Path
from urllib.parse import urlparse
import subprocess
import sys

### COMMON VARIABLES, CHANGE AS APPROPRIATE! ###
# Proxmox VE storage id for the root disk of the VM
STORAGE_ID = "local-zfs"
# VM root disk protocol (ide/sata/scsi) and slot
ROOT_DISK_SLOT = "scsi0"
# VM root disk extra options, must start with a leading comma
ROOT_DISK_EXTRA_OPTIONS = ",iothread=1,ssd=1,discard=on"
# Folder for downloading the cloud image
# If this is a backing folder for a Proxmox "Disk Image" storage,
# you can list and delete the cloud image in the WebUI
DL_IMAGE_PATH = "/var/lib/vz/images/999999"
# Final size of the VM root disk
# The script will resize the disk to this size after importing
TARGET_ROOT_DISK_SIZE = "8G"


def _get_available_hash_algorithms():
    """Return a dictionary of available hash function names and their associated function."""
    algorithms = {}
    for algorithm_name in hashlib.algorithms_available:
        algorithm_func = getattr(hashlib, algorithm_name, None)
        if algorithm_func:
            try:
                # Make sure the algorithm is actually available for use.
                # Not all algorithms listed as available are actually usable.
                # For example, md5 is not available in FIPS mode.
                algorithm_func()
            except Exception:
                pass
            else:
                algorithms[algorithm_name] = algorithm_func
    return algorithms


AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()


def digest_from_file(filename, algorithm):
    """Return the hex digest of a local file for a digest method specified by name, or None if the file is not present."""
    if not os.path.exists(filename):
        return None
    if os.path.isdir(filename):
        raise TypeError("attempted to take checksum of directory: %s" % filename)

    if hasattr(algorithm, 'hexdigest'):
        digest_method = algorithm
    else:
        digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()

    blocksize = 64 * 1024
    with open(os.path.realpath(filename), 'rb') as infile:
        block = infile.read(blocksize)
        while block:
            digest_method.update(block)
            block = infile.read(blocksize)
    return digest_method.hexdigest()


def checksum_matches(checksums_str, target_file, algorithm):
    """Check the local file against the checksum listed for its name in a SHAxxxSUMS-style file."""
    file_checksum = digest_from_file(target_file, algorithm)
    sums = dict()
    for line in checksums_str.splitlines():
        sums[line.split()[1].lstrip("*")] = line.split()[0]
    return file_checksum == sums.get(Path(target_file).name)


def download_file(url, local_filename):
    Path(local_filename).parent.mkdir(parents=True, exist_ok=True)
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    return local_filename


parser = argparse.ArgumentParser(
    description="Download and import cloud images",
    epilog=f"Example: {os.path.basename(sys.argv[0])} sha512:https://cloud.debian.org/images/cloud/bookworm/latest/SHA512SUMS https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2 1001,1002"
)
parser.add_argument("checksum", help="Format: <algorithm>:<url>, where <url> is the URL to the SHAxxxSUMS file. The image is only downloaded if the existing file's checksum differs from the one listed at that URL")
parser.add_argument("url", help="URL to the image file")
parser.add_argument("target_vmids", help="Comma-separated list of VMIDs whose root disks will be replaced with the downloaded cloud image. WARNING: the old root disk will be DESTROYED")

if __name__ == '__main__':
    args = parser.parse_args()
    print("Checking checksum")
    algo = args.checksum.split(":")[0]
    base_name = os.path.basename(urlparse(args.url).path)
    r = requests.get(args.checksum.split(":", 1)[1])
    r.raise_for_status()
    if checksum_matches(r.text, str(Path(DL_IMAGE_PATH)/base_name), algo):
        print("Checksum already matches, skipping download")
    else:
        print("Downloading image...")
        download_file(args.url, Path(DL_IMAGE_PATH)/base_name)
        print("Checking checksum")
        if checksum_matches(r.text, str(Path(DL_IMAGE_PATH)/base_name), algo):
            print("Download finished and checksum matches")
            for id in args.target_vmids.split(","):
                print(f"Updating disk for vmid {id}")
                # Detach and destroy the old root disk
                subprocess.run(["qm", "disk", "unlink", id, "--idlist", ROOT_DISK_SLOT, "--force"])
                # Import the downloaded image as the new root disk
                subprocess.run(["qm", "set", id, f"--{ROOT_DISK_SLOT}", f"{STORAGE_ID}:0,import-from={DL_IMAGE_PATH}/{base_name}{ROOT_DISK_EXTRA_OPTIONS}"])
                # Boot from the imported disk and grow it to the target size
                subprocess.run(["qm", "set", id, "--boot", f"order={ROOT_DISK_SLOT}"])
                subprocess.run(["qm", "resize", id, ROOT_DISK_SLOT, TARGET_ROOT_DISK_SIZE])
        else:
            raise AttributeError("Checksum doesn't match!")
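For a one-off manual run, the invocation mirrors the example in the argparse epilog (the SHA512SUMS and image URLs are Debian's published ones; the VMIDs 1001,1002 are placeholders for your own VMs/templates):

/usr/local/bin/get_cloud.py \
    sha512:https://cloud.debian.org/images/cloud/bookworm/latest/SHA512SUMS \
    https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2 \
    1001,1002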
xxx.service (example):
[Unit]
Description=Update and import cloud images (Debian Cloud)

[Service]
Type=oneshot
ExecStart=/usr/local/bin/get_cloud.py sha512:https://cloud.debian.org/images/cloud/bookworm/latest/SHA512SUMS https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-genericcloud-amd64.qcow2 1001
xxx.timer (example):
[Unit]
Description=Run get cloud daily

[Timer]
OnCalendar=*-*-* 7:00:00

[Install]
WantedBy=timers.target
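To confirm the timer is registered and see when it will next fire, the standard systemd tools can be used, for example:

systemctl list-timers xxx.timer
systemd-analyze calendar '*-*-* 7:00:00'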