Compare commits: 06869df157...v1.1 (5 commits)

- a874398629
- 8ea0ce9d20
- 2cd08d5fb0
- 55e67a5708
- 2e34ae394f
README.md

```diff
@@ -38,4 +38,8 @@ To get html with http requests
 ### [TinyDB](https://pypi.org/project/tinydb/)
 To store credential data and url of the http server
 ### SubProcess and Os
 To launch VLC
+### Sys
+To exit when a http request failed
+### [Colorama](https://github.com/tartley/colorama)
+To color text in terminal
```
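The new Colorama dependency is what the recolored menu entries in `menu.py` below rely on; a minimal sketch of that usage (the printed strings are placeholders):

```python
import colorama

colorama.init()  # enables ANSI escape handling on Windows
print(colorama.Style.BRIGHT + "unwatched file" + colorama.Style.RESET_ALL)
print(colorama.Fore.LIGHTBLACK_EX + "watched file" + colorama.Style.RESET_ALL)
```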
menu.py (42 changed lines)
```diff
@@ -1,5 +1,5 @@
 from database import Database
-from scrapper import get_files, get_uri, get_url
+import scrapper
 import utils
 from utils import get
 import platform
```
```diff
@@ -12,9 +12,14 @@ uri = ""
 preview_site = {}
 preview_database = ""
 
-def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0):
-    menu = Menu(options, title, preview_body_function=preview_command, preview_ratio=preview_size, skip_empty_option=skip_empty_entries, cursor_pos=cursor_pos
-    )
+def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0, preview_args=None):
+    menu = Menu(options,
+                title,
+                preview_body_function=preview_command,
+                preview_ratio=preview_size,
+                skip_empty_option=skip_empty_entries,
+                cursor_pos=cursor_pos,
+                preview_args=preview_args)
     return (menu.show());
 
 def add_site(database: Database) -> dict:
```
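The new `preview_args` parameter is threaded straight through to `Menu`, whose implementation is not part of this diff; a hedged usage sketch mirroring the call site in `files()` below (all values hypothetical, assuming one preview argument per option):

```python
options = ["season1/", "ep1.mkv"]
urls = ["http://example.com/videos/season1/",
        "http://example.com/videos/ep1.mkv"]  # one preview argument per option
choice = get_user_choice_by_menu(options,
                                 preview_command=files_preview,
                                 preview_args=urls,
                                 title="Index of /videos/")
```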
```diff
@@ -30,10 +35,10 @@ def add_site(database: Database) -> dict:
     site.update({"name": name})
     return (site)
 
-def files_preview(filename: str) -> str:
-    if (not filename.endswith("/")):
+def files_preview(url: str) -> str:
+    if (not url.endswith("/")):
         return (None)
-    files = get_files(preview_site, uri + filename)
+    files = scrapper.get_files_by_url(url);
     return ("\n".join(files))
 
 def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
```
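After this change `files_preview` is self-contained: it receives the full URL of the highlighted entry instead of reading the `uri`/`preview_site` globals, previews only directories (entries ending in `/`), and fetches the listing through the new `scrapper.get_files_by_url`. A usage sketch, assuming a reachable HTTP index at a placeholder URL:

```python
print(files_preview("http://example.com/videos/season1/"))  # newline-joined listing
print(files_preview("http://example.com/videos/ep1.mkv"))   # None: not a directory
```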
```diff
@@ -44,25 +49,28 @@ def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
             out.append(file);
             continue
         url = current_url + urllib.parse.quote(file)
-        print (url)
         viewing_data = database.get_viewing_data(url);
         if (viewing_data == None) or (viewing_data.get("finished") == False):
-            out.append(colorama.Style.BRIGHT + file + colorama.Style.RESET_ALL)
+            out.append(colorama.Style.BRIGHT + file)
         else:
-            out.append(colorama.Fore.LIGHTBLACK_EX + file + colorama.Style.RESET_ALL);
+            out.append(colorama.Fore.LIGHTBLACK_EX + file);
+    return (out);
+
+def get_files_path(current_url: str, files: list):
+    out = []
+    for file in files:
+        out.append(current_url + file);
     return (out);
 
 def files(site:dict, path:str, database:Database, pos:int):
-    global uri
-    global preview_site
-    uri = path
-    preview_site = site
-    current_url = get_url(site, path);
-    files = get_files(site, path)
+    current_url = scrapper.get_url(site, path);
+    files = scrapper.get_files(site, path)
     files_displayed = get_files_formated_by_viewing_data(database, files, site, path)
+    preview_args = get_files_path(current_url, files)
     choose = get_user_choice_by_menu(files_displayed,
                                      preview_command=files_preview,
+                                     preview_args=preview_args,
                                      preview_size=0.3,
                                      show_search_hint=True,
                                      title=f"Index of {path}",
```
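`get_files_path` precomputes one absolute URL per menu entry for the preview pane; note that, unlike `get_files_formated_by_viewing_data`, it concatenates the raw (unquoted) file names. A worked example with hypothetical values:

```python
current_url = "http://example.com/videos/"
files = ["../", "season1/", "ep1.mkv"]
print(get_files_path(current_url, files))
# ['http://example.com/videos/../',
#  'http://example.com/videos/season1/',
#  'http://example.com/videos/ep1.mkv']
```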
requirements.txt

```diff
@@ -1,6 +1,10 @@
 beautifulsoup4==4.12.2
 bs4==0.0.1
+certifi==2023.5.7
+charset-normalizer==3.1.0
+colorama==0.4.6
 getch==1.0
+idna==3.4
 requests==2.30.0
 soupsieve==2.4.1
 tinydb==4.7.1
```
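`certifi`, `charset-normalizer`, and `idna` are transitive dependencies of `requests` that are now pinned explicitly; `colorama` is the new direct dependency behind the menu coloring above.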
scrapper.py

```diff
@@ -1,6 +1,7 @@
 from bs4 import BeautifulSoup
 from urllib.parse import quote, unquote
 import utils
+import sys
 import requests
 
 def get_url(site: dict, path: str):
```
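The added `import sys` also fixes a latent bug: the pre-existing `sys.exit(1)` call, visible as context in the next hunk, had no matching import in the old header.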
```diff
@@ -9,11 +10,16 @@ def get_url(site: dict, path: str):
 
 def get_files(site:dict, path:str) -> []:
     url = get_url(site, path)
+    files = get_files_by_url(url)
+    return (files)
+
+def get_files_by_url(url: str):
     if (url in ["/../", "../"]):
         return ([])
     response = requests.get(url)
     if (response.status_code != 200):
         print("connection:", response.reason)
+        print(url)
         sys.exit(1)
     soup = BeautifulSoup(response.text, 'html.parser')
     files = []
```
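Extracting `get_files_by_url` from `get_files` lets callers that already hold a full URL (such as the new `files_preview`) fetch a directory listing directly; a hedged usage sketch with a placeholder URL:

```python
import scrapper

for name in scrapper.get_files_by_url("http://example.com/videos/"):
    print(name)  # e.g. "../", "season1/", "ep1.mkv"
```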
```diff
@@ -21,6 +27,7 @@ def get_files(site:dict, path:str) -> []:
         files.append(unquote(element["href"]))
     return (files)
 
+
 def get_uri(url: str) -> []:
     if (url in ["/../", "../"]):
         return ([])
```