posterg-website/data/cover/scrape.py

import os
import sys
import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
import urllib.request

def main(url):
    # Fetch the page; exit with an error message on any network/HTTP failure.
    try:
        response = requests.get(url, headers={"User-Agent": "Mozilla/5.0"})
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"Error: {e}")
        sys.exit(1)

    # Collect every <img> tag on the page.
    soup = BeautifulSoup(response.text, 'html.parser')
    images = soup.find_all('img')

    # Download each image into a local "images" directory.
    if not os.path.exists("images"):
        os.makedirs("images")

    for image in images:
        img_src = image.get('src')
        if not img_src:
            continue  # skip <img> tags without a src attribute
        img_url = urljoin(url, img_src)
        local_filename = urlparse(img_url).path.split('/')[-1]
        try:
            urllib.request.urlretrieve(img_url, os.path.join("images", local_filename))
            print(f"Downloaded {img_url}")
        except Exception as e:
            print(f"Error downloading {img_url}: {e}")

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python script.py <URL>")
        sys.exit(1)
    url = sys.argv[1]
    main(url)
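
To run it, pass the page URL as the only command-line argument; downloaded files are written to an images/ directory created in the current working directory. A hypothetical invocation (the URL below is only an example, and the usage string's script.py refers to this scrape.py file):

    python scrape.py https://example.com/gallery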