import sys
from urllib.parse import quote, unquote

import requests
from bs4 import BeautifulSoup

from utils import get_url


def get_files(site: dict, path: str) -> list:
    """Return the decoded href targets listed at `path` on the remote index page."""
    # Skip parent-directory entries so the listing does not walk back up the tree.
    if path in ("/../", "../"):
        return []
    url = get_url(site) + quote(path)
    response = requests.get(url)
    if response.status_code != 200:
        print("connection:", response.reason)
        sys.exit(1)
    soup = BeautifulSoup(response.text, "html.parser")
    files = []
    for element in soup.find_all("a"):
        href = element.get("href")
        if href is not None:
            files.append(unquote(href))
    return files


def get_uri(url: str) -> str:
    """Return the directory name shown in the page's <h1> heading, or "" on failure."""
    if url in ("/../", "../"):
        return ""
    try:
        response = requests.get(url)
        if response.status_code != 200:
            print("connection:", response.reason)
            sys.exit(1)
        soup = BeautifulSoup(response.text, "html.parser")
        # Drop the first 9 characters of the heading (the "Index of " prefix
        # that directory-listing pages typically use).
        return soup.find("h1").text[9:]
    except (requests.RequestException, AttributeError):
        return ""
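

# Minimal usage sketch, not part of the original module: it assumes that
# utils.get_url takes a dict describing the target site and returns its base
# URL (e.g. "http://example.com"); the dict keys below are hypothetical.
if __name__ == "__main__":
    site = {"url": "http://example.com"}  # hypothetical shape for the site dict
    for entry in get_files(site, "/"):
        print(entry)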