Created
January 28, 2025 14:33
-
-
Save HBIDamian/966435bcc9d14c58a72856a0a6d386c0 to your computer and use it in GitHub Desktop.
This code is a crude, brute-force batch BDS (Bedrock Dedicated Server) downloader based on https://github.com/Bedrock-OSS/BDS-Versions.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import json
import os
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
from time import sleep

import requests
# Constants | |
VERSIONS_URL = "https://raw.githubusercontent.com/Bedrock-OSS/BDS-Versions/main/versions.json" | |
VERSION_DETAILS_URL = "https://raw.githubusercontent.com/Bedrock-OSS/BDS-Versions/main/{platform}/{version}.json" | |
PREVIEW_BASE_URL = "https://raw.githubusercontent.com/Bedrock-OSS/BDS-Versions/main/{platform}_preview/{version}.json" | |
DOWNLOAD_DIR = "downloads" | |
TRACKING_FILE = "downloaded_versions.json" | |
MAX_RETRIES = 5 | |
INITIAL_BACKOFF = 5 # seconds | |
USER_AGENT = "BeeDeeEss Downloader Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36" | |
MAX_WORKERS = 4 # Number of parallel downloads | |
CHUNK_SIZE = 32 * 1024 # 32 KB chunks | |
# Headers | |
HEADERS = { | |
"User-Agent": USER_AGENT | |
} | |
# Load or create the tracking file | |
def load_tracking_file(): | |
if os.path.exists(TRACKING_FILE): | |
with open(TRACKING_FILE, "r") as f: | |
return json.load(f) | |
return {} | |
def save_tracking_file(data): | |
with open(TRACKING_FILE, "w") as f: | |
json.dump(data, f, indent=4) | |
# Download a file from a URL with exponential backoff | |
def download_file(url, destination): | |
if os.path.exists(destination): | |
print(f"File already exists: {destination}, skipping...") | |
return | |
attempt = 0 | |
backoff = INITIAL_BACKOFF | |
while attempt < MAX_RETRIES: | |
try: | |
print(f"Downloading: {url}") | |
response = requests.get(url, headers=HEADERS, stream=True, timeout=30) | |
response.raise_for_status() | |
with open(destination, "wb") as f: | |
for chunk in response.iter_content(chunk_size=CHUNK_SIZE): | |
f.write(chunk) | |
print(f"Downloaded to: {destination}") | |
return | |
except requests.RequestException as e: | |
attempt += 1 | |
print(f"Failed to download {url} (attempt {attempt}/{MAX_RETRIES}): {e}") | |
if attempt < MAX_RETRIES: | |
sleep(backoff) | |
backoff *= 2 # Exponential backoff | |
print(f"Failed to download {url} after {MAX_RETRIES} attempts. Skipping...") | |
# Process a single version download | |
def process_single_version(platform, version, version_type, base_dir, downloaded_versions): | |
if version in downloaded_versions.get(platform, {}).get(version_type, []): | |
print(f"Version {version} for {platform}/{version_type} already downloaded, skipping...") | |
return | |
# Determine URL based on version type | |
version_details_url = ( | |
PREVIEW_BASE_URL.format(platform=platform, version=version) | |
if version_type == "preview" | |
else VERSION_DETAILS_URL.format(platform=platform, version=version) | |
) | |
try: | |
response = requests.get(version_details_url, headers=HEADERS, timeout=15) | |
response.raise_for_status() | |
version_data = response.json() | |
download_url = version_data.get("download_url") | |
if not download_url: | |
print(f"No download_url found for {version} in {platform}/{version_type}, skipping...") | |
return | |
# Download the file | |
destination = os.path.join(base_dir, f"{version}.zip") | |
download_file(download_url, destination) | |
# Update tracking file | |
downloaded_versions.setdefault(platform, {}).setdefault(version_type, []).append(version) | |
save_tracking_file(downloaded_versions) | |
except requests.RequestException as e: | |
print(f"Failed to retrieve version details for {version} in {platform}/{version_type}: {e}") | |
# Process versions and download in parallel | |
def process_versions(platform, versions, version_type, downloaded_versions): | |
base_dir = os.path.join(DOWNLOAD_DIR, platform, version_type) | |
os.makedirs(base_dir, exist_ok=True) | |
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor: | |
futures = [ | |
executor.submit( | |
process_single_version, | |
platform, | |
version, | |
version_type, | |
base_dir, | |
downloaded_versions, | |
) | |
for version in versions | |
] | |
for future in as_completed(futures): | |
future.result() | |
# Main script execution | |
def main(): | |
try: | |
# Load version data | |
response = requests.get(VERSIONS_URL, headers=HEADERS, timeout=15) | |
response.raise_for_status() | |
version_data = response.json() | |
except requests.RequestException as e: | |
print(f"Failed to fetch versions.json: {e}") | |
return | |
downloaded_versions = load_tracking_file() | |
# Process for both platforms and version types | |
for platform, details in version_data.items(): | |
process_versions(platform, details["versions"], "stable", downloaded_versions) | |
process_versions(platform, details["preview_versions"], "preview", downloaded_versions) | |
if __name__ == "__main__": | |
main() |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment