8 Commits
v1.0 ... main

8 changed files with 81 additions and 24 deletions

View File

@ -35,9 +35,11 @@ python3 main.py
To parse the html To parse the html
### Requests ### Requests
To get html with http requests To get html with http requests
### [simple-term-menu](https://pypi.org/project/simple-term-menu/)
To create a navigable CLI menu
### [TinyDB](https://pypi.org/project/tinydb/) ### [TinyDB](https://pypi.org/project/tinydb/)
To store credential data and url of the http server To store credential data and url of the http server
### SubProcess and Os ### SubProcess and Os
To launch VLC To launch VLC
### Sys
To exit when an HTTP request fails
### [Colorama](https://github.com/tartley/colorama)
To color text in terminal

View File

@ -11,6 +11,12 @@ class Database():
def get_viewing_data(self, url:str ): def get_viewing_data(self, url:str ):
return (self.viewing_table.get(self.query.url == url)) return (self.viewing_table.get(self.query.url == url))
def set_viewing_data(self, url: str, viewing_data: dict):
    """Update the stored viewing-data record whose ``url`` field equals *url*.

    Bug fix: the original body was ``self.viewing_table.update(set(url, value), ...)``,
    which calls the Python builtin ``set`` with an undefined name ``value``
    (NameError at runtime) and ignores the ``viewing_data`` parameter entirely.
    TinyDB's ``Table.update`` accepts the fields-to-change dict directly as its
    first argument, with the query condition second.
    """
    self.viewing_table.update(viewing_data, self.query.url == url)
def add_viewing_data(self, viewing_data):
    """Insert *viewing_data* as a brand-new record in the viewing table."""
    self.viewing_table.insert(viewing_data)
def get_sites(self): def get_sites(self):
return (self.sites_table.all()) return (self.sites_table.all())

View File

@ -3,7 +3,7 @@ from scrapper import get_uri, get_files
from urllib.parse import quote, unquote from urllib.parse import quote, unquote
import player import player
from database import Database from database import Database
from utils import get_url import utils
import menu import menu
def files_navigator(site: dict): def files_navigator(site: dict):
@ -14,7 +14,7 @@ def files_navigator(site: dict):
database.set_last_site(site.get("id")) database.set_last_site(site.get("id"))
while True: while True:
pos = database.get_last_cursor_pos() pos = database.get_last_cursor_pos()
path = get_uri(get_url(site) + path) path = get_uri(utils.get_base_url(site) + path)
database.set_last_path(path) database.set_last_path(path)
file = menu.files(site, path, database, pos) file = menu.files(site, path, database, pos)
if (file == None): if (file == None):
@ -22,7 +22,7 @@ def files_navigator(site: dict):
elif (file == "../" and path == "/"): elif (file == "../" and path == "/"):
return return
if (not file.endswith("/")): if (not file.endswith("/")):
player.play(get_url(site) + quote(path + file), database) player.play(utils.get_base_url(site) + quote(path + file), database)
else: else:
path = path + file path = path + file

59
menu.py
View File

@ -1,7 +1,10 @@
from database import Database from database import Database
from scrapper import get_files, get_uri import scrapper
from utils import get_url, get import utils
from utils import get
import platform import platform
import urllib
import colorama
from terminal_menu.Menu import Menu from terminal_menu.Menu import Menu
@ -9,9 +12,14 @@ uri = ""
preview_site = {} preview_site = {}
preview_database = "" preview_database = ""
def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0): def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0, preview_args=None):
menu = Menu(options, title, preview_body_function=preview_command, preview_ratio=preview_size, skip_empty_option=skip_empty_entries, cursor_pos=cursor_pos menu = Menu(options,
) title,
preview_body_function=preview_command,
preview_ratio=preview_size,
skip_empty_option=skip_empty_entries,
cursor_pos=cursor_pos,
preview_args=preview_args)
return (menu.show()); return (menu.show());
def add_site(database: Database) -> dict: def add_site(database: Database) -> dict:
@ -27,21 +35,42 @@ def add_site(database: Database) -> dict:
site.update({"name": name}) site.update({"name": name})
return (site) return (site)
def files_preview(filename: str) -> str: def files_preview(url: str) -> str:
if (not filename.endswith("/")): if (not url.endswith("/")):
return (None) return (None)
files = get_files(preview_site, uri + filename) files = scrapper.get_files_by_url(url);
return ("\n".join(files)) return ("\n".join(files))
def files(site:dict, path:str, database:Database, pos:int): def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
global uri out = []
global preview_site current_url = utils.get_base_url(site) + urllib.parse.quote(path);
for file in files:
if file == "..":
out.append(file);
continue
url = current_url + urllib.parse.quote(file)
viewing_data = database.get_viewing_data(url);
if (viewing_data == None) or (viewing_data.get("finished") == False):
out.append(colorama.Style.BRIGHT + file)
else:
out.append(colorama.Fore.LIGHTBLACK_EX + file);
return (out);
uri = path def get_files_path(current_url: str, files: list):
preview_site = site out = []
files = get_files(site, path) for file in files:
choose = get_user_choice_by_menu(files, out.append(current_url + file);
return (out);
def files(site:dict, path:str, database:Database, pos:int):
current_url = scrapper.get_url(site, path);
files = scrapper.get_files(site, path)
files_displayed = get_files_formated_by_viewing_data(database, files, site, path)
preview_args = get_files_path(current_url, files)
choose = get_user_choice_by_menu(files_displayed,
preview_command=files_preview, preview_command=files_preview,
preview_args=preview_args,
preview_size=0.3, preview_size=0.3,
show_search_hint=True, show_search_hint=True,
title=f"Index of {path}", title=f"Index of {path}",

View File

@ -1,5 +1,6 @@
from database import Database from database import Database
import menu import menu
import terminal_menu.Menu as terminal_menu
from time import sleep from time import sleep
import os, subprocess import os, subprocess
@ -26,6 +27,10 @@ def _play(url: str, database: Database):
def play(url: str, database: Database): def play(url: str, database: Database):
viewing_data = database.get_viewing_data(url);
if (viewing_data == None):
viewing_data = {"url": url, "finished": True}
database.add_viewing_data(viewing_data);
with open(os.devnull, 'wb') as devnull: with open(os.devnull, 'wb') as devnull:
subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT) subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT)

View File

@ -1,6 +1,10 @@
beautifulsoup4==4.12.2 beautifulsoup4==4.12.2
bs4==0.0.1 bs4==0.0.1
certifi==2023.5.7
charset-normalizer==3.1.0
colorama==0.4.6
getch==1.0 getch==1.0
idna==3.4
requests==2.30.0 requests==2.30.0
soupsieve==2.4.1 soupsieve==2.4.1
tinydb==4.7.1 tinydb==4.7.1

View File

@ -1,15 +1,25 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from urllib.parse import quote, unquote from urllib.parse import quote, unquote
from utils import get_url import utils
import sys
import requests import requests
def get_url(site: dict, path: str):
    """Return the full URL for *path* on *site*.

    Concatenates the site's base URL (which embeds any credentials) with the
    percent-encoded path.
    """
    return (utils.get_base_url(site) + quote(path))
def get_files(site:dict, path:str) -> []: def get_files(site:dict, path:str) -> []:
url = get_url(site) + quote(path) url = get_url(site, path)
files = get_files_by_url(url)
return (files)
def get_files_by_url(url: str):
if (url in ["/../", "../"]): if (url in ["/../", "../"]):
return ([]) return ([])
response = requests.get(url) response = requests.get(url)
if (response.status_code != 200): if (response.status_code != 200):
print("connection:", response.reason) print("connection:", response.reason)
print(url)
sys.exit(1) sys.exit(1)
soup = BeautifulSoup(response.text, 'html.parser') soup = BeautifulSoup(response.text, 'html.parser')
files = [] files = []
@ -17,6 +27,7 @@ def get_files(site:dict, path:str) -> []:
files.append(unquote(element["href"])) files.append(unquote(element["href"]))
return (files) return (files)
def get_uri(url: str) -> []: def get_uri(url: str) -> []:
if (url in ["/../", "../"]): if (url in ["/../", "../"]):
return ([]) return ([])

View File

@ -1,5 +1,5 @@
def get_url(site: dict): def get_base_url(site: dict):
if (site.get("user") == "" and site.get("password") == ""): if (site.get("user") == "" and site.get("password") == ""):
return (f"http://{site.get('url')}") return (f"http://{site.get('url')}")
else: else: