- Fixed an error that occurred unless you put https:// at the beginning of the link.
- Added a debug mode for more information about downloading.
- Added a logger.
- All sites are now saved in the copied folder.
1 parent c3ed23a · commit ec675ed
Showing 6 changed files with 142 additions and 75 deletions.
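Per the commit message, a URL typed without a scheme now gets an https:// prefix and every site is saved under the copied folder. A minimal sketch of that behaviour, reusing the urlparse logic from the entry script below (the example URL is hypothetical):

from urllib.parse import urlparse

url = "example.com/page"  # hypothetical input typed without a scheme
if not url.startswith(("http://", "https://")):
    url = "https://" + url  # scheme fix described in the commit message
folder_name = "copied/" + urlparse(url).netloc
print(url, folder_name)  # https://example.com/page copied/example.com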
@@ -0,0 +1,27 @@
import argparse
import asyncio
from urllib.parse import urlparse
from src.webcopy import download_page
from src import logger


async def main():
    parser = argparse.ArgumentParser(description="Webcopy")
    parser.add_argument(
        "-u", "-l", "--url", "--link", type=str, help="The URL of the website to copy"
    )
    parser.add_argument(
        "-d", "-debug", "--debug", action="store_true", help="Enable debug mode"
    )
    args = parser.parse_args()
    url = args.url or input("Enter the URL of the website: ")
    debug = args.debug or False
    if not url.startswith(("http://", "https://")):
        url = "https://" + url

    folder_name = "copied/" + urlparse(url).netloc
    logger.info(f"Downloading website from {url} to {folder_name}")
    await download_page(url, folder_name, debug)


if __name__ == "__main__":
    asyncio.run(main())
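For reference, argparse stores all four option strings under one destination taken from the first long option, so -u, -l, --url and --link all populate args.url; a small standalone sketch (not part of the commit):

import argparse

parser = argparse.ArgumentParser(description="Webcopy")
parser.add_argument("-u", "-l", "--url", "--link", type=str)
args = parser.parse_args(["--link", "https://example.com"])
print(args.url)  # https://example.com -- every alias shares the "url" dest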
@@ -0,0 +1,34 @@
from loguru import logger
import sys


def setup_logger():
    info = "<blue>{time:HH:mm:ss}</blue> => <green>{message}</green>"
    error = "<red>{time:HH:mm:ss}</red> => <red>{message}</red>"
    debug = "<yellow>{time:HH:mm:ss}</yellow> => <yellow>{message}</yellow>"

    logger.remove()
    logger.add(
        sys.stdout,
        colorize=True,
        format=info,
        level="INFO",
        filter=lambda record: record["level"].name == "INFO",
    )
    logger.add(
        sys.stdout,
        colorize=True,
        format=error,
        level="ERROR",
        filter=lambda record: record["level"].name == "ERROR",
    )
    logger.add(
        sys.stdout,
        colorize=True,
        format=debug,
        level="DEBUG",
        filter=lambda record: record["level"].name == "DEBUG",
    )


setup_logger()
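Each sink filters on an exact level name, so INFO, ERROR and DEBUG records each get their own colour and format rather than stacking thresholds. A rough usage sketch, assuming this module is the one imported as `from src import logger` in the entry script above:

from src import logger  # setup_logger() runs when the package is imported (last line above)

logger.info("Downloading website ...")    # INFO sink: blue time, green message
logger.error("Error fetching ...")        # ERROR sink: all red
logger.debug("Downloading 12 resources")  # DEBUG sink: all yellow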
@@ -0,0 +1,69 @@
import os
import aiohttp
import asyncio
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
from . import logger


async def create_directory_for_resource(resource_url, folder):
    parsed_url = urlparse(resource_url)
    resource_path = parsed_url.path.lstrip("/")
    resource_folder = os.path.join(folder, os.path.dirname(resource_path))

    if not os.path.exists(resource_folder):
        os.makedirs(resource_folder)

    return os.path.join(resource_folder, os.path.basename(resource_path))


async def download_page(url, folder, debug):
    os.makedirs(folder, exist_ok=True)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                if response.status != 200:
                    logger.error(f"Error fetching {url}")
                    return

                content = await response.text()
                with open(
                    os.path.join(folder, "index.html"), "w", encoding="utf-8"
                ) as file:
                    file.write(content)

                await download_resources(
                    BeautifulSoup(content, "html.parser"), url, folder, session, debug
                )
    except Exception as e:
        logger.error(f"Error downloading page {url}: {e}")
        if debug:
            from traceback import format_exc

            logger.error(format_exc())
        return

    logger.info(f"Page {url} downloaded successfully.")


async def download_resources(soup, base_url, folder, session, debug):
    tasks = [
        save_resource(urljoin(base_url, resource.get(attr)), folder, session, debug)
        for tag, attr in {"img": "src", "link": "href", "script": "src"}.items()
        for resource in soup.find_all(tag)
        if resource.get(attr)
    ]
    debug and logger.debug(f"Downloading {len(tasks)} resources")
    await asyncio.gather(*tasks)


async def save_resource(url, folder, session, debug):
    try:
        async with session.get(url) as response:
            if response.status == 200:
                resource_path = await create_directory_for_resource(url, folder)
                with open(resource_path, "wb") as file:
                    file.write(await response.read())
                debug and logger.debug(f"Resource {url} saved to {resource_path}")
    except Exception as e:
        logger.error(f"Error downloading resource {url}: {e}")
This file was deleted.