core: use new Menu parameter to remove useless global variables

starnakin 2023-05-31 15:58:59 +02:00
parent 55e67a5708
commit 2cd08d5fb0
2 changed files with 30 additions and 14 deletions
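The idea of the change, in brief: the files_preview callback used to read the module-level globals uri and preview_site to rebuild the URL of the highlighted entry; the new preview_args parameter lets the caller hand the menu one ready-made preview argument per option instead. The sketch below illustrates that mechanism; it assumes the Menu class (whose implementation is not part of this diff) simply looks up the highlighted option's entry in preview_args and passes it to preview_body_function, and the class and method names are illustrative only.

    # Hypothetical, simplified stand-in for the real Menu class, which is not
    # shown in this commit. It only illustrates how a per-option preview_args
    # list removes the need for module-level globals.
    from typing import Callable, Optional

    class Menu:
        def __init__(self, options: list, preview_body_function: Optional[Callable] = None, preview_args: Optional[list] = None):
            self.options = options
            self.preview_body_function = preview_body_function
            # one preview argument per option, e.g. the full URL of each entry
            self.preview_args = preview_args or []

        def preview_for(self, index: int) -> Optional[str]:
            # The callback gets its data as an argument instead of reading globals.
            if self.preview_body_function is None or index >= len(self.preview_args):
                return None
            return self.preview_body_function(self.preview_args[index])

    def files_preview(url: str) -> Optional[str]:
        # directories end with "/"; plain files get no preview
        if not url.endswith("/"):
            return None
        return f"(files under {url} would be listed here)"

    if __name__ == "__main__":
        options = ["docs/", "readme.txt"]
        urls = ["http://example.com/pub/docs/", "http://example.com/pub/readme.txt"]
        menu = Menu(options, preview_body_function=files_preview, preview_args=urls)
        print(menu.preview_for(0))  # preview text for the highlighted directory
        print(menu.preview_for(1))  # None: not a directory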

menu.py (37 changes)

@@ -1,5 +1,5 @@
 from database import Database
-from scrapper import get_files, get_uri, get_url
+import scrapper
 import utils
 from utils import get
 import platform
@@ -12,9 +12,14 @@ uri = ""
 preview_site = {}
 preview_database = ""
 
-def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0):
-    menu = Menu(options, title, preview_body_function=preview_command, preview_ratio=preview_size, skip_empty_option=skip_empty_entries, cursor_pos=cursor_pos
-)
+def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0, preview_args=None):
+    menu = Menu(options,
+        title,
+        preview_body_function=preview_command,
+        preview_ratio=preview_size,
+        skip_empty_option=skip_empty_entries,
+        cursor_pos=cursor_pos,
+        preview_args=preview_args)
     return (menu.show());
 
 def add_site(database: Database) -> dict:
@@ -30,10 +35,10 @@ def add_site(database: Database) -> dict:
     site.update({"name": name})
     return (site)
 
-def files_preview(filename: str) -> str:
-    if (not filename.endswith("/")):
+def files_preview(url: str) -> str:
+    if (not url.endswith("/")):
         return (None)
-    files = get_files(preview_site, uri + filename)
+    files = scrapper.get_files_by_url(url);
     return ("\n".join(files))
 
 def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
@@ -51,17 +56,21 @@ def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
         out.append(colorama.Fore.LIGHTBLACK_EX + file);
     return (out);
 
-def files(site:dict, path:str, database:Database, pos:int):
-    global uri
-    global preview_site
+def get_files_path(current_url: str, files: list):
+    out = []
+    for file in files:
+        out.append(current_url + file);
+    return (out);
 
-    uri = path
-    preview_site = site
-    current_url = get_url(site, path);
-    files = get_files(site, path)
+def files(site:dict, path:str, database:Database, pos:int):
+    current_url = scrapper.get_url(site, path);
+    files = scrapper.get_files(site, path)
     files_displayed = get_files_formated_by_viewing_data(database, files, site, path)
+    preview_args = get_files_path(current_url, files)
     choose = get_user_choice_by_menu(files_displayed,
         preview_command=files_preview,
+        preview_args=preview_args,
         preview_size=0.3,
         show_search_hint=True,
         title=f"Index of {path}",

scrapper.py

@@ -1,6 +1,7 @@
 from bs4 import BeautifulSoup
 from urllib.parse import quote, unquote
 import utils
+import sys
 import requests
 
 def get_url(site: dict, path: str):
@@ -9,11 +10,16 @@ def get_url(site: dict, path: str):
 
 def get_files(site:dict, path:str) -> []:
     url = get_url(site, path)
+    files = get_files_by_url(url)
+    return (files)
+
+def get_files_by_url(url: str):
     if (url in ["/../", "../"]):
         return ([])
     response = requests.get(url)
     if (response.status_code != 200):
         print("connection:", response.reason)
+        print(url)
         sys.exit(1)
     soup = BeautifulSoup(response.text, 'html.parser')
     files = []
@@ -21,6 +27,7 @@ def get_files(site:dict, path:str) -> []:
             files.append(unquote(element["href"]))
     return (files)
 
 def get_uri(url: str) -> []:
     if (url in ["/../", "../"]):
         return ([])
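For reference, the refactor in scrapper.py splits the old get_files into a thin wrapper plus a URL-based get_files_by_url, which is what files_preview now calls directly. The loop that actually fills the files list sits outside the visible hunks; the sketch below is one plausible reading, assuming the remote pages are plain directory-index listings whose entries are anchor links:

    import sys
    from urllib.parse import unquote

    import requests
    from bs4 import BeautifulSoup

    def get_files_by_url(url: str):
        # nothing to list when navigation has escaped above the root
        if url in ["/../", "../"]:
            return []
        response = requests.get(url)
        if response.status_code != 200:
            print("connection:", response.reason)
            print(url)
            sys.exit(1)
        soup = BeautifulSoup(response.text, 'html.parser')
        files = []
        # assumption: each directory entry is an <a href="..."> tag; the real
        # filtering logic is not visible in this diff
        for element in soup.find_all("a", href=True):
            files.append(unquote(element["href"]))
        return files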