Mirror of https://github.com/Laborratte5/linux-iso-seeder.git (synced 2025-12-30 17:08:17 +01:00)
Create fetch_torrents.py
parent efa44eee86
commit c9f23f83e7
1 changed file with 172 additions and 0 deletions
fetch_torrents.py  Normal file  +172
@@ -0,0 +1,172 @@
#!/usr/bin/env python3
import os
import re
import requests
import logging
import time
import shutil
from bs4 import BeautifulSoup

# Configure logging
log_file = "/logs/fetch_torrents.log"
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s: %(message)s",
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler(log_file)
    ]
)
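
# Deployment note (assumption based on the hard-coded paths, not stated in the
# commit): /logs, /watch and /downloads look like container mounts, with a
# BitTorrent client watching /watch for new .torrent files and saving the
# downloaded ISOs under /downloads.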
watch_dir = "/watch"

def download_torrent(name, url):
    dest = os.path.join(watch_dir, f"{name}.torrent")
    try:
        logging.info(f"Fetching {url} ...")
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        with open(dest, "wb") as f:
            f.write(r.content)
        logging.info(f"Saved {dest}")
        return True
    except Exception as e:
        logging.error(f"Failed to download {url}: {e}")
        return False

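# Example call (hypothetical values, for illustration only):
#   download_torrent("ubuntu-24.04",
#                    "https://releases.ubuntu.com/24.04/ubuntu-24.04-desktop-amd64.iso.torrent")
# would write /watch/ubuntu-24.04.torrent and return True on success.
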
def fetch_ubuntu_lts():
    url = "https://releases.ubuntu.com/"
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")

        lts_versions = []
        for link in soup.find_all('a', href=True):
            href = link['href']
            if re.match(r"^\d{2}\.\d{2}/$", href) and "lts" in link.text.lower():
                lts_versions.append(href.strip("/"))

        if not lts_versions:
            logging.warning("No Ubuntu LTS versions found.")
            return False

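        # Caveat (not verified here): once an LTS gets a point release, the ISO under
        # releases.ubuntu.com/<NN.NN>/ is typically named with the point version
        # (e.g. ubuntu-24.04.x-desktop-amd64.iso.torrent), so the URL built below from
        # the bare series number may return 404.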
        latest_lts = sorted(lts_versions, reverse=True)[0]
        torrent_url = f"https://releases.ubuntu.com/{latest_lts}/ubuntu-{latest_lts}-desktop-amd64.iso.torrent"
        return download_torrent(f"ubuntu-{latest_lts}", torrent_url)
    except Exception as e:
        logging.error(f"Ubuntu fetch error: {e}")
        return False

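# Note (assumption, not verified here): Debian also publishes its BitTorrent files
# under a sibling bt-dvd/ directory; if the iso-dvd/ index scraped below carries no
# .torrent links, https://cdimage.debian.org/debian-cd/current/amd64/bt-dvd/ would be
# the place to look instead.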
def fetch_debian_stable():
    url = "https://cdimage.debian.org/debian-cd/current/amd64/iso-dvd/"
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")

        for link in soup.find_all('a', href=True):
            href = link['href']
            if "-DVD-1.iso.torrent" in href:
                torrent_url = url + href
                name = href.replace(".iso.torrent", "")
                return download_torrent(name, torrent_url)
        logging.warning("No Debian DVD-1 torrent found.")
        return False
    except Exception as e:
        logging.error(f"Debian fetch error: {e}")
        return False

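# Note (assumption): getfedora.org has since been folded into fedoraproject.org, so
# this scrape may come back empty after a redirect; Fedora's official torrent index
# at https://torrent.fedoraproject.org/ could serve as a fallback source if so.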
def fetch_fedora_latest():
    url = "https://getfedora.org/en/workstation/download/"
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")

        for link in soup.find_all('a', href=True):
            href = link['href']
            if href.endswith(".torrent") and "Workstation" in href:
                torrent_url = href
                name = os.path.basename(href).replace(".torrent", "")
                return download_torrent(name, torrent_url)
        logging.warning("No Fedora torrent found.")
        return False
    except Exception as e:
        logging.error(f"Fedora fetch error: {e}")
        return False

def fetch_arch_latest():
    torrent_url = "https://geo.mirror.pkgbuild.com/iso/latest/archlinux-x86_64.iso.torrent"
    return download_torrent("archlinux-latest", torrent_url)

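# Note: the Kali download page exposes .torrent links for several image types
# (installer, netinst, live, ...); the loop below simply takes the first match,
# which may not be the plain installer image.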
def fetch_kali_latest():
    url = "https://www.kali.org/get-kali/#kali-installer-images"
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")

        for link in soup.find_all('a', href=True):
            href = link['href']
            if href.endswith(".torrent"):
                torrent_url = href
                name = os.path.basename(href).replace(".torrent", "")
                return download_torrent(name, torrent_url)
        logging.warning("No Kali torrent found.")
        return False
    except Exception as e:
        logging.error(f"Kali fetch error: {e}")
        return False

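# Usage example (hypothetical values): running with
#   ENABLE_DISTROWATCH=true DISTROWATCH_FILTER="mint, mx"
# keeps only torrents whose link text on the DistroWatch torrent page contains
# "mint" or "mx"; with no filter set, every .torrent link is fetched.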
def fetch_distrowatch_torrents():
    enable = os.getenv("ENABLE_DISTROWATCH", "false").lower() == "true"
    if not enable:
        logging.info("DistroWatch fetching disabled.")
        return False

    filters = os.getenv("DISTROWATCH_FILTER", "").split(",")
    filters = [f.strip().lower() for f in filters if f.strip()]

    url = "https://distrowatch.com/dwres.php?resource=torrents"
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")

        count = 0
        for link in soup.find_all('a', href=True):
            href = link['href']
            if href.endswith(".torrent"):
                distro_name = link.text.lower()
                if filters and not any(f in distro_name for f in filters):
                    continue

                torrent_url = href if href.startswith("http") else f"https://distrowatch.com/{href}"
                name = os.path.basename(torrent_url).replace(".torrent", "")
                if download_torrent(name, torrent_url):
                    count += 1
        logging.info(f"DistroWatch torrents fetched: {count}")
        return True
    except Exception as e:
        logging.error(f"DistroWatch fetch error: {e}")
        return False

if __name__ == "__main__":
    start_time = time.time()
    logging.info("Starting torrent fetch run.")

    success_count = 0
    failure_count = 0

    for func in [fetch_ubuntu_lts, fetch_debian_stable, fetch_fedora_latest, fetch_arch_latest, fetch_kali_latest, fetch_distrowatch_torrents]:
        if func():
            success_count += 1
        else:
            failure_count += 1

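    # Note: shutil.disk_usage() raises FileNotFoundError if /downloads does not
    # exist; nothing here catches that, so the report below assumes the directory
    # is already mounted.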
    total, used, free = shutil.disk_usage("/downloads")
    logging.info(f"Downloads folder usage: {used // (2**30)} GB used / {total // (2**30)} GB total")

    elapsed = time.time() - start_time
    logging.info(f"Run complete in {elapsed:.2f} seconds. {success_count} successful, {failure_count} failed.")