Compare commits

...

24 Commits

SHA1 Message Date
a874398629 core: update requirements 2023-05-31 16:02:39 +02:00
8ea0ce9d20 core: update requirements 2023-05-31 16:01:37 +02:00
2cd08d5fb0 core: use new param of menu to remove useless global variable 2023-05-31 15:58:59 +02:00
55e67a5708 fix: remove print 2023-05-31 15:32:35 +02:00
2e34ae394f fix: remove print and remove a useless style color 2023-05-31 15:30:31 +02:00
06869df157 Merge remote-tracking branch 'refs/remotes/origin/main' 2023-05-31 15:28:56 +02:00
80481868db add: file already view is now in light grey 2023-05-31 15:28:47 +02:00
266f56f8e6 Update 'README.md' 2023-05-30 10:15:52 +00:00
a8279c146c add: save last cursor pos 2023-05-29 19:23:39 +02:00
3dd457c9dd Merge remote-tracking branch 'refs/remotes/origin/main' 2023-05-29 19:02:48 +02:00
03df28754a core: update: terminal_menu lib 2023-05-29 19:01:23 +02:00
d383d416ce Update 'README.md' 2023-05-27 22:18:35 +00:00
24a6dd1124 add subgit 2023-05-28 00:16:46 +02:00
445387dd80 add: requirements.txt 2023-05-28 00:13:16 +02:00
00881516b7 fix: crash: variable name error 2023-05-20 19:22:21 +02:00
36533495e7 fix: remove useless import 2023-05-20 19:17:43 +02:00
619427b81a fix: put import in a try 2023-05-20 19:15:19 +02:00
7a92e2c65f add: windows support 2023-05-20 19:04:18 +02:00
e14bb51127 clean: remove useless dependency 2023-05-20 18:36:40 +02:00
ac198c9cb0 fix: function missing arg 2023-05-17 11:14:23 +02:00
61f535fce4 fix: and clean 2023-05-17 11:08:40 +02:00
d39e9cfb62 fix: and clean 2023-05-16 23:25:05 +02:00
42c3f6b37d Merge remote-tracking branch 'refs/remotes/origin/main' 2023-05-16 22:33:53 +02:00
1838ac4850 fix: and clean 2023-05-16 22:33:36 +02:00
10 changed files with 355 additions and 144 deletions

.gitmodules (vendored, new file, +3)

@@ -0,0 +1,3 @@
[submodule "terminal_menu"]
	path = terminal_menu
	url = git@git.chauvet.pro:starnakin/terminal_menu.git

README.md

@@ -20,6 +20,7 @@ cd VLC_HTTP_LAUNCHER
 3. Install the dependencies:
 ```bash
 pip install -r requirements.txt
+pip install -r terminal_menu/requirements.txt
 ```
 
 ## Usage
@@ -34,9 +35,11 @@ python3 main.py
 To parse the html
 ### Requests
 To get html with http requests
-### [simple-term-menu](https://pypi.org/project/simple-term-menu/)
-To create navigate CLI menu
 ### [TinyDB](https://pypi.org/project/tinydb/)
 To store credential data and url of the http server
 ### SubProcess and Os
 To launch VLC
+### Sys
+To exit when an HTTP request fails
+### [Colorama](https://github.com/tartley/colorama)
+To color text in the terminal
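
Read together, the dependency notes describe one pipeline: Requests fetches a directory index, BeautifulSoup extracts the links, and VLC is launched on a chosen URL through subprocess, with sys as the bail-out path. A minimal sketch of that pipeline, assuming a reachable VLC/HTTP index; the address below is a placeholder:

```python
# Sketch of the pipeline the README describes; the server address is a
# placeholder, not a value from the repository.
import subprocess
import sys

import requests
from bs4 import BeautifulSoup

response = requests.get("http://192.168.1.10:8080/")
if response.status_code != 200:
    sys.exit(1)  # the "Sys" role: exit when an HTTP request fails

soup = BeautifulSoup(response.text, "html.parser")
links = [a["href"] for a in soup.find_all("a")]  # one <a> per index entry

# The "SubProcess and Os" role: hand the first plain file to VLC.
files = [link for link in links if not link.endswith("/")]
if files:
    subprocess.check_call(["vlc", "http://192.168.1.10:8080/" + files[0]])
```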

database.py (new file, +69)

@@ -0,0 +1,69 @@
from tinydb import TinyDB, Query
from tinydb.operations import set, delete


class Database():
    def __init__(self):
        self.db = TinyDB("database.json", indent=4)
        self.query = Query()
        self.cookies_table = self.db.table("cookies")
        self.viewing_table = self.db.table("viewing")
        self.sites_table = self.db.table("sites")

    def get_viewing_data(self, url: str):
        return (self.viewing_table.get(self.query.url == url))

    def set_viewing_data(self, url: str, viewing_data: dict):
        # replace the stored record for this url (the original body
        # referenced an undefined `value` variable)
        self.viewing_table.update(viewing_data, self.query.url == url)

    def add_viewing_data(self, viewing_data):
        self.viewing_table.insert(viewing_data)

    def get_sites(self):
        return (self.sites_table.all())

    def get_site_by_id(self, id: int):
        return (self.sites_table.get(self.query.id == id))

    def get_sites_table_len(self):
        return (len(self.sites_table.all()))

    def get_cookies_table_len(self):
        return (len(self.cookies_table.all()))

    def add_cookies(self, arg: dict):
        self.cookies_table.insert(arg)

    def get_cookies(self):
        # the cookies table holds a single record with id "0"
        return (self.cookies_table.get(self.query.id == "0"))

    def get_last_site_id(self):
        cookies = self.get_cookies()
        last_site_id = cookies.get("last_site")
        return (last_site_id)

    def get_last_site(self):
        last_site_id = self.get_last_site_id()
        last_site = self.get_site_by_id(last_site_id)
        return (last_site)

    def get_last_path(self):
        cookies = self.get_cookies()
        last_path = cookies.get("last_path")
        return (last_path)

    def get_last_cursor_pos(self):
        cookies = self.get_cookies()
        last_cursor_pos = cookies.get("last_cursor_pos")
        return (last_cursor_pos)

    def set_last_site(self, value):
        self.cookies_table.update(set("last_site", value), self.query.id == "0")

    def set_last_path(self, value):
        self.cookies_table.update(set("last_path", value), self.query.id == "0")

    def set_last_cursor_pos(self, value):
        self.cookies_table.update(set("last_cursor_pos", value), self.query.id == "0")

    def add_site(self, site):
        self.sites_table.insert(site)
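
A short usage sketch of the wrapper above, assuming a fresh database.json in the working directory; the site record is a made-up example:

```python
# Hypothetical usage of the Database wrapper; the site fields are
# placeholders, not values from the repository.
from database import Database

db = Database()
if db.get_cookies_table_len() == 0:
    # main.py seeds the singleton cookies record exactly like this
    db.add_cookies({"last_path": "", "last_site": "", "last_cursor_pos": 0, "id": "0"})

db.add_site({"url": "192.168.1.10:8080", "user": "", "password": "",
             "id": db.get_sites_table_len(), "name": "nas"})
db.set_last_site(0)
print(db.get_last_site())        # -> the site record inserted above
print(db.get_last_cursor_pos())  # -> 0
```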

main.py (195 lines changed)

@@ -1,112 +1,38 @@
-from tinydb import TinyDB, Query
-from bs4 import BeautifulSoup
-import requests
-from simple_term_menu import TerminalMenu
-import os
-import sys
-import subprocess
-from operator import itemgetter
-from urllib.parse import unquote
-from tinydb.operations import set, delete
+from urllib.parse import unquote, quote
+from scrapper import get_uri, get_files
+import player
+from database import Database
+import utils
+import menu
 
-def get_files(url: str) -> []:
-    if (url in ["/../", "../"]):
-        return ([])
-    response = requests.get(url)
-    if (response.status_code != 200):
-        print("connection:", response.reason)
-        sys.exit(1)
-    soup = BeautifulSoup(response.text, 'html.parser')
-    files = []
-    for element in soup.findAll("a"):
-        file = {}
-        file.update({"name": unquote(element['href'])})
-        file.update({"link": element["href"]})
-        files.append(file)
-    return (files)
-
-def get_uri(url: str) -> []:
-    if (url in ["/../", "../"]):
-        return ([])
-    try:
-        response = requests.get(url)
-        if (response.status_code != 200):
-            print("connection:", response.reason)
-            sys.exit(1)
-        soup = BeautifulSoup(response.text, 'html.parser')
-        return(soup.find("h1").text[9:])
-    except:
-        return ("")
-
-def get(files: [], key: str):
-    names = []
-    for file in files:
-        names.append(file.get(key))
-    return (names)
-
-def open_vlc(url: str) -> None:
-    with open(os.devnull, 'wb') as devnull:
-        subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT)
-
-def files_preview(filename: str) -> str:
-    if (not filename.endswith("/")):
-        return (None)
-    files = get_files(url + filename)
-    return ("\n".join(get(files, "name")))
-
-def files_navigator():
-    global url
-    path = cookies_table.get(query.id == "0").get("last_path")
-    if (path):
-        url = url + path
+def files_navigator(site: dict):
+    if site.get("id") == database.get_last_site_id():
+        path = database.get_last_path()
     else:
-        url = url + "/"
+        path = '/'
+    database.set_last_site(site.get("id"))
     while True:
-        cookies_table.update(set("last_path", get_uri(url)), query.id == "0")
-        try:
-            files = get_files(url)
-        except:
-            print("Connection: ERROR")
-            return (1)
-        terminal_menu = TerminalMenu(get(files, "name"), preview_command=files_preview, preview_size=0.3, show_search_hint=True, title=get_uri(url))
-        file_choose = terminal_menu.show()
-        if (file_choose == None):
-            sys.exit(0)
-        elif (file_choose == 0 and get_uri(url) == "/"):
+        pos = database.get_last_cursor_pos()
+        path = get_uri(utils.get_base_url(site) + path)
+        database.set_last_path(path)
+        file = menu.files(site, path, database, pos)
+        if (file == None):
+            return
+        elif (file == "../" and path == "/"):
             return
-        file = get(files, "link")[file_choose]
         if (not file.endswith("/")):
-            open_vlc(url + file)
+            player.play(utils.get_base_url(site) + quote(path + file), database)
         else:
-            url = url + file
+            path = path + file
 
-def add_site():
-    print("add a site:")
-    site = {}
-    site.update({"url": input("url without protocol (ip:port):")})
-    site.update({"user": input("user(leave blank):")})
-    site.update({"password": input("password(leave blank):")})
-    site.update({"id": len(sites_table.all())})
-    name = input(f"name[{str(len(sites_table.all()))}] :")
-    if (name == ""):
-        name = str(len(sites_table.all()))
-    site.update({"name": name})
-    sites_table.insert(site)
+def add_site(database: Database):
+    site = menu.add_site(database)
+    database.add_site(site)
 
-def set_url(site):
-    global url
-    cookies_table.update(set("last_site", site.get("id")), query.id == "0")
-    if (site.get("user") and site.get("password")):
-        url = f"http://{site.get('user')}:{site.get('password')}@{site.get('url')}/"
-    else:
-        url = f"http://{site.get('url')}/";
-
 def config_preview(site_name:str):
     id = int(site_name.split(": ")[0])
-    site = sites_table.get(query.id == id)
+    site = database.get_site_by_id(id)
     str = f"""
 url: {site.get('url')}
 user: {site.get('user')}
@@ -114,35 +40,32 @@ def config_preview(site_name:str):
 """
     return (str)
 
-def sites_deleter():
+def sites_deleter(database: Database):
     lst = []
-    for i in sites_table.all():
+    for i in database.get_sites():
         lst.append(f"{str(i.get('id'))}: {i.get('name')}")
-    terminal_menu = TerminalMenu(lst, show_search_hint=True, preview_title="delete", preview_command=config_preview)
-    choose = terminal_menu.show()
+    choose = menu.site_deleter(lst, database)
     if (choose == None):
         return (1)
     site_name = lst[choose]
     id = int(site_name.split(": ")[0])
     sites_table.remove(query.id == id)
-    if (cookies_table.get(query.id == "0").get("last_site") == id):
-        cookies_table.update(set("last_site", ""), query.id == choose.get("id"))
-        cookies_table.update(set("last_path", ""), query.id == choose.get("id"))
+    if (database.get_last_site() == id):
+        database.set_last_path("")
+        database.set_last_site("")
 
-def sites_editor():
+def sites_editor(database: Database):
     lst = []
-    for i in sites_table.all():
+    for i in database.get_sites():
         lst.append(f"{str(i.get('id'))}: {i.get('name')}")
-    terminal_menu = TerminalMenu(lst, show_search_hint=True, preview_title="delete", preview_command=config_preview)
-    choose = terminal_menu.show()
+    choose = menu.site_editor(lst, database)
    if (choose == None):
        return (1)
     site_name = lst[choose]
     id = int(site_name.split(": ")[0])
-    site = sites_table.get(query.id == id)
+    site = database.get_site_by_id(id)
     lst = [f"name: {site.get('name')}", f"url: {site.get('url')}", f"user: {site.get('user')}", f"password: {site.get('password')}"]
-    terminal_menu = TerminalMenu(lst)
-    choose = terminal_menu.show()
+    choose = menu.param_editor(lst)
     if (choose == None):
         return (1);
     for i in lst:
@@ -156,36 +79,32 @@ def sites_editor():
     elif (choose == 3):
         sites_table.update(set("password", input("password: ")), query.id == id)
 
-def sites_navigator():
-    if (len(sites_table.all()) == 0):
-        add_site()
-    last_site_id = cookies_table.get(query.id == "0").get("last_site")
-    last_site = sites_table.get(query.id == last_site_id)
+def sites_navigator(database: Database):
+    nb_site = database.get_sites_table_len()
+    if (nb_site == 0):
+        add_site(database)
+        nb_site = 1
+    last_site = database.get_last_site()
     if (last_site != None):
-        set_url(last_site)
-        files_navigator()
+        files_navigator(last_site)
     while True:
-        terminal_menu = TerminalMenu(get(sites_table.all(), "name") + ["", "add", "edit", "delete"], skip_empty_entries=True, show_search_hint=True)
-        choose = terminal_menu.show()
+        nb_site = database.get_sites_table_len()
+        choose = menu.sites(database.get_sites())
         if (choose == None):
             return (1)
-        if (choose == len(sites_table.all()) + 1):
-            add_site();
-        elif (choose == len(sites_table.all()) + 2):
-            sites_editor()
-        elif (choose == len(sites_table.all()) + 3):
-            sites_deleter()
-            sites_navigator()
+        if (choose == nb_site + 1):
+            add_site(database);
+        elif (choose == nb_site + 2):
+            sites_editor(database)
+        elif (choose == nb_site + 3):
+            sites_deleter(database)
+            sites_navigator(database)
             return
         else:
-            set_url(sites_table.all()[choose])
-            files_navigator()
+            files_navigator(database.get_sites()[choose])
 
-db = TinyDB("./database.json", indent=4)
-query = Query()
-sites_table = db.table("sites")
-cookies_table = db.table("cookies")
-if (len(cookies_table.all()) == 0):
-    cookies_table.insert({"last_path": "", "last_site": "", "id": "0"})
-sites_navigator();
+database = Database()
+
+if (database.get_cookies_table_len() == 0):
+    database.add_cookies({"last_path": "", "last_site": "", "last_cursor_pos": 0, "id": "0"})
+sites_navigator(database);
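
Beyond the file split, the substantive change in main.py is architectural: the old version kept url, db, sites_table and cookies_table as module-level globals mutated via `global`, while the new version owns that state in a single Database object threaded through most function signatures. A minimal, hypothetical illustration of the pattern, not the repository's code:

```python
# Hypothetical before/after of the refactor; names are simplified.

# Before: implicit shared state, invisible in signatures.
# url = ""
# def files_navigator():
#     global url
#     url = url + "/"

# After: the state carrier is explicit, so call sites show what a
# function depends on, and tests can pass in a fake Database.
class Database:
    def __init__(self):
        self.last_path = "/"

def files_navigator(site: dict, database: Database) -> None:
    print(f"browsing {site['name']} from {database.last_path}")

files_navigator({"name": "nas"}, Database())
```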

menu.py (new file, +127)

@@ -0,0 +1,127 @@
from database import Database
import scrapper
import utils
from utils import get
import urllib.parse
import colorama
from terminal_menu.Menu import Menu

# module-level handle for the preview callbacks, which only receive the
# selected entry's text; set by site_deleter()/site_editor() before the menu runs
preview_database = None


def get_user_choice_by_menu(options: list, title=None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries=False, cursor_pos: int = 0, preview_args=None):
    menu = Menu(options,
                title,
                preview_body_function=preview_command,
                preview_ratio=preview_size,
                skip_empty_option=skip_empty_entries,
                cursor_pos=cursor_pos,
                preview_args=preview_args)
    return (menu.show())


def add_site(database: Database) -> dict:
    print("add a site:")
    site = {}
    site.update({"url": input("url without protocol (ip:port):")})
    site.update({"user": input("user(leave blank):")})
    site.update({"password": input("password(leave blank):")})
    site.update({"id": database.get_sites_table_len()})
    name = input(f"name[{str(database.get_sites_table_len())}]")
    if (name == ""):
        # keep the default name a string, as the prompt suggests
        name = str(database.get_sites_table_len())
    site.update({"name": name})
    return (site)


def files_preview(url: str) -> str:
    if (not url.endswith("/")):
        return (None)
    files = scrapper.get_files_by_url(url)
    return ("\n".join(files))


def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
    out = []
    current_url = utils.get_base_url(site) + urllib.parse.quote(path)
    for file in files:
        if file == "..":
            out.append(file)
            continue
        url = current_url + urllib.parse.quote(file)
        viewing_data = database.get_viewing_data(url)
        if (viewing_data == None) or (viewing_data.get("finished") == False):
            out.append(colorama.Style.BRIGHT + file)
        else:
            # entries already viewed are rendered in light grey
            out.append(colorama.Fore.LIGHTBLACK_EX + file)
    return (out)


def get_files_path(current_url: str, files: list):
    out = []
    for file in files:
        out.append(current_url + file)
    return (out)


def files(site: dict, path: str, database: Database, pos: int):
    current_url = scrapper.get_url(site, path)
    files = scrapper.get_files(site, path)
    files_displayed = get_files_formated_by_viewing_data(database, files, site, path)
    preview_args = get_files_path(current_url, files)
    choose = get_user_choice_by_menu(files_displayed,
                                     preview_command=files_preview,
                                     preview_args=preview_args,
                                     preview_size=0.3,
                                     show_search_hint=True,
                                     title=f"Index of {path}",
                                     cursor_pos=pos)
    if (choose == None):
        database.set_last_cursor_pos(0)
        return (None)
    database.set_last_cursor_pos(choose)
    return (files[choose])


def config_preview(site_name: str):
    id = int(site_name.split(": ")[0])
    site = preview_database.get_site_by_id(id)
    if (site == None):
        return
    str = f"""
url: {site.get('url')}
user: {site.get('user')}
password: {site.get('password')}
"""
    return (str)


def site_deleter(sites, database: Database):
    global preview_database
    preview_database = database
    # the original call also passed preview_title="delete", which
    # get_user_choice_by_menu() does not accept
    choose = get_user_choice_by_menu(sites,
                                     show_search_hint=True,
                                     preview_command=config_preview)
    return (choose)


def param_editor(lst: list):
    choose = get_user_choice_by_menu(lst)
    return (choose)


def site_editor(lst: list, database: Database):
    global preview_database
    preview_database = database
    choose = get_user_choice_by_menu(lst + ["", "quit"],
                                     skip_empty_entries=True,
                                     show_search_hint=True,
                                     title="Edit")
    if (choose == len(lst) + 1):
        return (None)
    return (choose)


def sites(sites: list):
    choose = get_user_choice_by_menu(get(sites, "name") + ["", "add", "edit", "delete"],
                                     skip_empty_entries=True,
                                     show_search_hint=True)
    return (choose)
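
The bright/grey distinction that get_files_formated_by_viewing_data applies (the "file already view is now in light grey" commit) can be seen in isolation; a standalone snippet with a made-up file list:

```python
# Standalone demonstration of the colouring scheme: unseen entries bright,
# already-viewed entries light grey. The file names are made up.
import colorama

colorama.init(autoreset=True)  # restore the default style after each print

viewed = {"episode_01.mkv"}
for name in ["episode_01.mkv", "episode_02.mkv", "subs/"]:
    if name in viewed:
        print(colorama.Fore.LIGHTBLACK_EX + name)
    else:
        print(colorama.Style.BRIGHT + name)
```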

player.py (new file, +36)

@@ -0,0 +1,36 @@
from database import Database
import menu
import terminal_menu.Menu as terminal_menu
from time import sleep
import os, subprocess
import vlc  # python-vlc binding; _play() needs it but the original file never imported it


def _play(url: str, database: Database):
    # Experimental resume-position player: not called by play() yet, and it
    # expects a menu.start_pos() helper that menu.py does not define.
    start_pos = 0
    viewing_data = database.get_viewing_data(url)
    if (viewing_data != None):
        response = menu.start_pos([f"go back to {str(viewing_data.get('last_pos'))}", "restart from 0:00"])
        if (response == None):
            return (1)
        elif (response == 0):
            start_pos = viewing_data.get("last_pos")
    vlc_instance = vlc.Instance()
    player = vlc_instance.media_player_new()
    media = vlc_instance.media_new(url)
    player.set_media(media)
    player.play()
    sleep(1)
    player.video_set_mouse_input(True)
    player.set_position(start_pos / player.get_length())
    player.set_fullscreen(True)
    while True:
        print("duration:", player.get_time(), ":", player.get_length())


def play(url: str, database: Database):
    viewing_data = database.get_viewing_data(url)
    if (viewing_data == None):
        # a file is marked "finished" as soon as it is launched
        viewing_data = {"url": url, "finished": True}
        database.add_viewing_data(viewing_data)
    with open(os.devnull, 'wb') as devnull:
        subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT)
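
play() silences VLC by redirecting its output to an os.devnull file handle; the same launch can be written with the subprocess.DEVNULL constant, which avoids managing the file object. A sketch, assuming a vlc binary on PATH:

```python
# Equivalent launch using subprocess.DEVNULL instead of an os.devnull handle.
import subprocess

def open_vlc(url: str) -> None:
    subprocess.check_call(
        ["vlc", url],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,  # discard VLC's console noise
    )

# open_vlc("http://192.168.1.10:8080/movie.mkv")  # placeholder URL
```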

requirements.txt

@@ -2,11 +2,10 @@ beautifulsoup4==4.12.2
 bs4==0.0.1
 certifi==2023.5.7
 charset-normalizer==3.1.0
-enzyme==0.4.1
-getch==1.0
+colorama==0.4.6
 idna==3.4
-install==1.3.5
 requests==2.30.0
-simple-term-menu==1.6.1
 soupsieve==2.4.1
 tinydb==4.7.1
 urllib3==2.0.2

scrapper.py (new file, +42)

@@ -0,0 +1,42 @@
from bs4 import BeautifulSoup
from urllib.parse import quote, unquote
import utils
import sys
import requests


def get_url(site: dict, path: str):
    url = utils.get_base_url(site) + quote(path)
    return (url)


def get_files(site: dict, path: str) -> list:
    url = get_url(site, path)
    files = get_files_by_url(url)
    return (files)


def get_files_by_url(url: str):
    if (url in ["/../", "../"]):
        return ([])
    response = requests.get(url)
    if (response.status_code != 200):
        print("connection:", response.reason)
        print(url)
        sys.exit(1)
    soup = BeautifulSoup(response.text, 'html.parser')
    files = []
    # every <a> in the server's index page is a file or directory entry
    for element in soup.findAll("a"):
        files.append(unquote(element["href"]))
    return (files)


def get_uri(url: str) -> str:
    if (url in ["/../", "../"]):
        return ("")
    try:
        response = requests.get(url)
        if (response.status_code != 200):
            print("connection:", response.reason)
            sys.exit(1)
        soup = BeautifulSoup(response.text, 'html.parser')
        # the index page's <h1> reads "Index of <path>"; strip the 9-char prefix
        return (soup.find("h1").text[9:])
    except:
        return ("")

terminal_menu (submodule, +1)

Submodule terminal_menu added at 5c07857c79

utils.py (new file, +12)

@@ -0,0 +1,12 @@
def get_base_url(site: dict):
    if (site.get("user") == "" and site.get("password") == ""):
        return (f"http://{site.get('url')}")
    else:
        return (f"http://{site.get('user')}:{site.get('password')}@{site.get('url')}")


def get(dictionnarys: list, key: str):
    lst = []
    for dictionnary in dictionnarys:
        lst.append(dictionnary.get(key))
    return (lst)
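
A quick check of get_base_url's two branches, using hypothetical site records:

```python
# Hypothetical site records; the addresses and credentials are placeholders.
from utils import get, get_base_url

anonymous = {"url": "192.168.1.10:8080", "user": "", "password": ""}
protected = {"url": "192.168.1.10:8080", "user": "alice", "password": "secret"}

print(get_base_url(anonymous))            # -> http://192.168.1.10:8080
print(get_base_url(protected))            # -> http://alice:secret@192.168.1.10:8080
print(get([anonymous, protected], "user"))  # -> ['', 'alice']
```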