Compare commits

...

2 Commits

131
main.py
View File

@@ -3,49 +3,124 @@ from bs4 import BeautifulSoup
import os
import subprocess
import sys
from operator import itemgetter

import requests
from simple_term_menu import TerminalMenu
def get_files(url: str) -> list:
    """Fetch a directory-listing page and return its entries.

    Each entry is a dict with "name" (the anchor text) and "link" (the
    href attribute).  Returns an empty list for parent-directory
    pseudo-paths and when the HTTP request does not succeed.
    """
    # Guard against navigating above the listing root.
    if url in ("/../", "../"):
        return []
    response = requests.get(url)
    # BUG FIX: the original evaluated the bare name `exit` (a no-op) on
    # a non-200 status and fell through to parsing the error page.
    if response.status_code != 200:
        return []
    soup = BeautifulSoup(response.text, 'html.parser')
    files = []
    for element in soup.findAll("a"):
        files.append({"name": element.text, "link": element["href"]})
    return files
def get_uri(url: str) -> str:
    """Return the path shown in the listing page's <h1> header.

    The autoindex page renders "Index of <path>" in its <h1>; the first
    9 characters ("Index of ") are stripped off.  Returns [] for
    parent-directory pseudo-paths (kept for backward compatibility) and
    "" when the HTTP request does not succeed.
    """
    if url in ("/../", "../"):
        return []
    response = requests.get(url)
    # BUG FIX: the original evaluated the bare name `exit` (a no-op) and
    # went on to parse the error page, crashing if it had no <h1>.
    if response.status_code != 200:
        return ""
    soup = BeautifulSoup(response.text, 'html.parser')
    # len("Index of ") == 9 — assumes the standard autoindex title.
    return soup.find("h1").text[9:]
def get(files: list, key: str) -> list:
    """Extract *key* from every dict in *files*, preserving order.

    Missing keys yield None (dict.get semantics), matching the original
    loop-and-append behaviour.
    """
    return [entry.get(key) for entry in files]
def open_vlc(url: str) -> None:
    """Launch VLC on *url*, discarding all of its console output.

    Blocks until VLC exits and raises CalledProcessError on a non-zero
    exit status (subprocess.check_call semantics).
    """
    # subprocess.DEVNULL replaces manually opening os.devnull.
    subprocess.check_call(['vlc', url],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.STDOUT)
def preview(filename: str) -> str:
    """Preview callback for TerminalMenu.

    Plain files get no preview (None); for a directory entry (a name
    ending in "/") the directory's contents are listed, one per line.
    """
    if not filename.endswith("/"):
        return None
    entries = get_files(url + filename)
    return "\n".join(get(entries, "name"))
def files_navigator():
    """Interactive browser over the current site's directory listing.

    Entering a directory appends it to the global `url`; choosing a
    plain file plays it with VLC; choosing the first entry while at the
    root ("/") returns to the site menu.  Escaping the menu (no
    selection) exits the program.
    """
    global url
    path = cookies.get("path")
    if path:
        # BUG FIX: the original computed `url + path` and discarded the
        # result, so a saved path was never applied.
        url = url + path
    else:
        path = "/"
    while True:
        cookies.update({"last_path": path})
        # BUG FIX: the original passed a SET literal {"last_path", path}
        # to db.update(); persist the cookie in its own table instead.
        cookies_table.update({"last_path": path}, query.id == 0)
        files = get_files(url)
        terminal_menu = TerminalMenu(
            get(files, "name"),
            preview_command=preview,
            preview_size=0.3,
            show_search_hint=True,
            title=get_uri(url),
        )
        file_choose = terminal_menu.show()
        if file_choose is None:
            sys.exit(0)
        elif file_choose == 0 and get_uri(url) == "/":
            # First entry of the root listing: back to the site menu.
            return
        link = get(files, "link")[file_choose]
        if not link.endswith("/"):
            open_vlc(url + link)
        else:
            url = url + link
def add_site():
    """Prompt the user for a new site's details and store the record.

    The record holds url, optional user/password, a sequential id, and
    a display name that defaults to the url when left blank.
    """
    print("add a site:")
    site = {
        "url": input("url:"),
        "user": input("user(leave blank):"),
        "password": input("password(leave blank):"),
        # BUG FIX: the original took len() of the bound method
        # `sites_table.all` instead of calling it, raising TypeError.
        "id": len(sites_table.all()),
    }
    name = input("name[url] :")
    if name == "":
        name = site["url"]
    site["name"] = name
    sites_table.insert(site)
def set_url(site):
    """Set the global base URL for *site* and remember it as the last
    site visited.

    Embeds HTTP basic credentials in the URL when both user and
    password are present.
    """
    global url
    cookies.update({"last_site": site})
    # BUG FIX: the in-memory update above is never written back to the
    # database; persist it so the next run can resume on this site.
    cookies_table.update({"last_site": site}, query.id == 0)
    user = site.get("user")
    password = site.get("password")
    if user and password:
        url = f"http://{user}:{password}@{site.get('url')}/"
    else:
        url = f"http://{site.get('url')}/"
def sites_navigator():
    """Top-level menu: pick a site, then browse its files.

    Offers every stored site plus "add"/"edit"/"remove" actions ("edit"
    and "remove" are not implemented yet).  Resumes on the last visited
    site, if any, before showing the menu.
    """
    if len(sites_table.all()) == 0:
        add_site()
    site = cookies.get("last_site")
    if site:
        set_url(site)
        files_navigator()
    while True:
        sites = sites_table.all()
        terminal_menu = TerminalMenu(
            get(sites, "name") + ["", "add", "edit", "remove"],
            skip_empty_entries=True,
            show_search_hint=True,
        )
        choose = terminal_menu.show()
        # Menu layout: 0..len-1 are sites, len is the blank separator,
        # len+1 / len+2 / len+3 are add / edit / remove.
        if choose is None:
            # BUG FIX: escaping the menu used to fall through to the
            # else branch and index the site list with None.
            sys.exit(0)
        elif choose == len(sites) + 1:
            add_site()
        elif choose == len(sites) + 2:
            pass  # TODO: edit a site
        elif choose == len(sites) + 3:
            pass  # TODO: remove a site
        else:
            set_url(sites[choose])
            # BUG FIX: the original passed a SET literal to db.update();
            # persist the path cookie in its own table instead.
            cookies_table.update({"last_path": get_uri(url)}, query.id == 0)
            files_navigator()
# Persistent storage: sites the user has registered, plus a single
# "cookies" document (id 0) holding session state such as the last
# site and path visited.
db = TinyDB("./database.json", indent=4)
query = Query()
sites_table = db.table("sites")
cookies_table = db.table("cookies")

if len(cookies_table.all()) == 0:
    cookies_table.insert({"id": 0})
cookies = cookies_table.all()[0]

# Guard the interactive entry point so importing this module for reuse
# or testing does not start the menu loop.
if __name__ == "__main__":
    sites_navigator()