# project305/main.py

from loguru import logger
import pretty_errors
from colorama import Fore, Back, Style, init
import pathlib
import sys
from sys import platform
import argparse
from argparse import ArgumentParser
import yaml
import asyncio
from scraper import Scraper


def init_argparser() -> ArgumentParser:
    """Build the command-line parser; CLI flags override values from config.yaml."""
    argparser = argparse.ArgumentParser(
        description="Asynchronous scraper for a list of domains.",
        argument_default=argparse.SUPPRESS,
    )
    argparser.add_argument(
        "--config", "-c",
        help="Path to the config file",
        type=pathlib.Path,
        default="config.yaml",
    )
    argparser.add_argument(
        "--domains", "-d",
        help="Path to the domains file",
        type=pathlib.Path,
    )
    argparser.add_argument(
        "--proxy", "-p",
        help="Path to the proxy file",
        type=pathlib.Path,
    )
    argparser.add_argument("--rps_min", help="Minimum requests per second", type=int)
    argparser.add_argument("--rps_max", help="Maximum requests per second", type=int)
    return argparser
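
# Illustrative invocation only; the flags come from the parser above, but the
# file names are placeholders, not paths shipped with the project:
#   python main.py -c config.yaml -d domains.txt -p proxies.txt --rps_min 1 --rps_max 5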


def load_config() -> dict:
    """Read the YAML config and overlay any values given on the command line."""
    argparser = init_argparser()
    args = vars(argparser.parse_args())
    with open(args["config"]) as f:
        config = yaml.safe_load(f)
    # CLI arguments take precedence over values from the config file
    config["settings"].update(args)
    # Remove the config path itself so only scraper settings are passed on
    config["settings"].pop("config")
    return config
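
# load_config() assumes the YAML file has a top-level "settings" mapping whose
# keys are forwarded verbatim to Scraper(**config["settings"]). A sketch of a
# compatible config.yaml; everything below except the "settings" key itself is
# an assumption based on the CLI flags, not taken from the real project:
#
#   settings:
#     domains: domains.txt
#     proxy: proxies.txt
#     rps_min: 1
#     rps_max: 5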


async def main():
    logger.add("project.log")
    logger.info("Starting...")
    if platform != "linux":
        logger.critical("Not for Windows; run only on GNU/Linux!")
        input()  # wait for a keypress so the message stays visible
        sys.exit(1)
    config = load_config()
    # Scraper is constructed from the merged file + CLI settings
    scraper = Scraper(**config["settings"])


if __name__ == "__main__":
    asyncio.run(main())