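"""Helpers for scraping HTTP directory-index pages: list the files a page
links to and read the page heading. Relies on the project's
`utils.get_base_url(site)` to turn a site dict into a base URL."""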
import sys
from urllib.parse import quote, unquote

import requests
from bs4 import BeautifulSoup

import utils

def get_url(site: dict, path: str) -> str:
    # Join the site's base URL with a percent-encoded path.
    return utils.get_base_url(site) + quote(path)

def get_files(site: dict, path: str) -> list[str]:
    # Resolve site and path to a full URL, then list the files behind it.
    url = get_url(site, path)
    return get_files_by_url(url)

def get_files_by_url(url: str) -> list[str]:
    # Parent-directory links point out of the listing; nothing to list there.
    if url in ("/../", "../"):
        return []
    response = requests.get(url)
    if response.status_code != 200:
        print("connection:", response.reason)
        print(url)
        sys.exit(1)
    soup = BeautifulSoup(response.text, "html.parser")
    # Every entry in the listing is an <a> tag; decode the percent-encoded
    # href back into a literal file name.
    return [unquote(element["href"]) for element in soup.find_all("a")]

def get_uri(url: str) -> str:
    # Same parent-directory guard as get_files_by_url.
    if url in ("/../", "../"):
        return ""
    try:
        response = requests.get(url)
        if response.status_code != 200:
            print("connection:", response.reason)
            sys.exit(1)
        soup = BeautifulSoup(response.text, "html.parser")
        # Drop the first nine characters of the heading, i.e. the
        # "Index of " prefix that standard directory-index pages use.
        return soup.find("h1").text[9:]
    except Exception:
        # A bare `except:` here would also swallow the SystemExit raised
        # by sys.exit above; catching Exception keeps exits working.
        return ""
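
# Minimal usage sketch, assuming `utils.get_base_url(site)` returns a base
# URL such as "http://example.com"; the site dict below is a hypothetical
# shape, since this module never looks inside it directly.
if __name__ == "__main__":
    site = {"scheme": "http", "host": "example.com"}  # hypothetical keys
    for name in get_files(site, "/"):
        print(name)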