Compare commits

...

16 Commits

Author SHA1 Message Date
a8279c146c add: save last cursor pos 2023-05-29 19:23:39 +02:00
3dd457c9dd Merge remote-tracking branch 'refs/remotes/origin/main' 2023-05-29 19:02:48 +02:00
03df28754a core: update: terminal_menu lib 2023-05-29 19:01:23 +02:00
d383d416ce Update 'README.md' 2023-05-27 22:18:35 +00:00
24a6dd1124 add subgit 2023-05-28 00:16:46 +02:00
445387dd80 add: requirements.txt 2023-05-28 00:13:16 +02:00
00881516b7 fix: crash: variable name error 2023-05-20 19:22:21 +02:00
36533495e7 fix: remove useless import 2023-05-20 19:17:43 +02:00
619427b81a fix: put import in a try 2023-05-20 19:15:19 +02:00
7a92e2c65f add: windows support 2023-05-20 19:04:18 +02:00
e14bb51127 clean: remove useless dependency 2023-05-20 18:36:40 +02:00
ac198c9cb0 fix: function missing arg 2023-05-17 11:14:23 +02:00
61f535fce4 fix: and clean 2023-05-17 11:08:40 +02:00
d39e9cfb62 fix: and clean 2023-05-16 23:25:05 +02:00
42c3f6b37d Merge remote-tracking branch 'refs/remotes/origin/main' 2023-05-16 22:33:53 +02:00
1838ac4850 fix: and clean 2023-05-16 22:33:36 +02:00
10 changed files with 298 additions and 144 deletions

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "terminal_menu"]
path = terminal_menu
url = git@git.chauvet.pro:starnakin/terminal_menu.git

View File

@ -20,6 +20,7 @@ cd VLC_HTTP_LAUNCHER
3. Install the dependencies:
```bash
pip install -r requirements.txt
pip install -r terminal_menu/requirements.txt
```
## Usage

63
database.py Normal file
View File

@ -0,0 +1,63 @@
from tinydb import TinyDB, Query
from tinydb.operations import set, delete
class Database():
    """Thin persistence layer over a TinyDB file (database.json)."""

    def __init__(self):
        # Pretty-print the backing JSON for easier manual inspection.
        self.db = TinyDB("database.json", indent=4)
        self.query = Query()
        self.cookies_table = self.db.table("cookies")
        self.viewing_table = self.db.table("viewing")
        self.sites_table = self.db.table("sites")

    def get_viewing_data(self, url: str):
        """Return the stored viewing record for *url*, or None."""
        return self.viewing_table.get(self.query.url == url)

    def get_sites(self):
        """Return every registered site record."""
        return self.sites_table.all()

    def get_site_by_id(self, id: int):
        """Return the site whose 'id' field equals *id*, or None."""
        return self.sites_table.get(self.query.id == id)

    def get_sites_table_len(self):
        """Return the number of registered sites."""
        return len(self.sites_table.all())

    def get_cookies_table_len(self):
        """Return the number of cookie records."""
        return len(self.cookies_table.all())

    def add_cookies(self, arg: dict):
        """Insert a new cookie record."""
        self.cookies_table.insert(arg)

    def get_cookies(self):
        """Return the singleton cookie record (id "0"), or None."""
        return self.cookies_table.get(self.query.id == "0")

    def get_last_site_id(self):
        """Return the id of the most recently visited site."""
        return self.get_cookies().get("last_site")

    def get_last_site(self):
        """Return the full record of the most recently visited site, or None."""
        return self.get_site_by_id(self.get_last_site_id())

    def get_last_path(self):
        """Return the last browsed path."""
        return self.get_cookies().get("last_path")

    def get_last_cursor_pos(self):
        """Return the last saved menu cursor position."""
        return self.get_cookies().get("last_cursor_pos")

    def set_last_site(self, value):
        """Persist *value* as the last visited site id."""
        self.cookies_table.update(set("last_site", value), self.query.id == "0")

    def set_last_path(self, value):
        """Persist *value* as the last browsed path."""
        self.cookies_table.update(set("last_path", value), self.query.id == "0")

    def set_last_cursor_pos(self, value):
        """Persist *value* as the last menu cursor position."""
        self.cookies_table.update(set("last_cursor_pos", value), self.query.id == "0")

    def add_site(self, site):
        """Insert a new site record."""
        self.sites_table.insert(site)

195
main.py
View File

@ -1,112 +1,38 @@
from tinydb import TinyDB, Query
from bs4 import BeautifulSoup
import requests
from simple_term_menu import TerminalMenu
import os
import sys
import subprocess
from operator import itemgetter
from urllib.parse import unquote
from tinydb.operations import set, delete
from urllib.parse import unquote, quote
from scrapper import get_uri, get_files
from urllib.parse import quote, unquote
import player
from database import Database
from utils import get_url
import menu
def get_files(url: str) -> []:
if (url in ["/../", "../"]):
return ([])
response = requests.get(url)
if (response.status_code != 200):
print("connection:", response.reason)
sys.exit(1)
soup = BeautifulSoup(response.text, 'html.parser')
files = []
for element in soup.findAll("a"):
file = {}
file.update({"name": unquote(element['href'])})
file.update({"link": element["href"]})
files.append(file)
return (files)
def get_uri(url: str) -> []:
if (url in ["/../", "../"]):
return ([])
try:
response = requests.get(url)
if (response.status_code != 200):
print("connection:", response.reason)
sys.exit(1)
soup = BeautifulSoup(response.text, 'html.parser')
return(soup.find("h1").text[9:])
except:
return ("")
def get(files: [], key: str):
names = []
for file in files:
names.append(file.get(key))
return (names)
def open_vlc(url: str) -> None:
with open(os.devnull, 'wb') as devnull:
subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT)
def files_preview(filename: str) -> str:
if (not filename.endswith("/")):
return (None)
files = get_files(url + filename)
return ("\n".join(get(files, "name")))
def files_navigator():
global url
path = cookies_table.get(query.id == "0").get("last_path")
if (path):
url = url + path
def files_navigator(site: dict):
if site.get("id") == database.get_last_site_id():
path = database.get_last_path()
else:
url = url + "/"
path = '/'
database.set_last_site(site.get("id"))
while True:
cookies_table.update(set("last_path", get_uri(url)), query.id == "0")
try:
files = get_files(url)
except:
print("Connection: ERROR")
return (1)
terminal_menu = TerminalMenu(get(files, "name"), preview_command=files_preview, preview_size=0.3, show_search_hint=True, title=get_uri(url))
file_choose = terminal_menu.show()
if (file_choose == None):
sys.exit(0)
elif (file_choose == 0 and get_uri(url) == "/"):
pos = database.get_last_cursor_pos()
path = get_uri(get_url(site) + path)
database.set_last_path(path)
file = menu.files(site, path, database, pos)
if (file == None):
return
elif (file == "../" and path == "/"):
return
file = get(files, "link")[file_choose]
if (not file.endswith("/")):
open_vlc(url + file)
player.play(get_url(site) + quote(path + file), database)
else:
url = url + file
path = path + file
def add_site():
print("add a site:")
site = {}
site.update({"url": input("url without protocol (ip:port):")})
site.update({"user": input("user(leave blank):")})
site.update({"password": input("password(leave blank):")})
site.update({"id": len(sites_table.all())})
name = input(f"name[{str(len(sites_table.all()))}] :")
if (name == ""):
name = str(len(sites_table.all()))
site.update({"name": name})
sites_table.insert(site)
def set_url(site):
global url
cookies_table.update(set("last_site", site.get("id")), query.id == "0")
if (site.get("user") and site.get("password")):
url = f"http://{site.get('user')}:{site.get('password')}@{site.get('url')}/"
else:
url = f"http://{site.get('url')}/";
def add_site(database: Database):
site = menu.add_site(database)
database.add_site(site)
def config_preview(site_name:str):
id = int(site_name.split(": ")[0])
site = sites_table.get(query.id == id)
site = database.get_site_by_id(id)
str = f"""
url: {site.get('url')}
user: {site.get('user')}
@ -114,35 +40,32 @@ def config_preview(site_name:str):
"""
return (str)
def sites_deleter():
def sites_deleter(database: Database):
lst = []
for i in sites_table.all():
for i in database.get_sites():
lst.append(f"{str(i.get('id'))}: {i.get('name')}")
terminal_menu = TerminalMenu(lst, show_search_hint=True, preview_title="delete", preview_command=config_preview)
choose = terminal_menu.show()
choose = menu.site_deleter(lst, database)
if (choose == None):
return (1)
site_name = lst[choose]
id = int(site_name.split(": ")[0])
sites_table.remove(query.id == id)
if (cookies_table.get(query.id == "0").get("last_site") == id):
cookies_table.update(set("last_site", ""), query.id == choose.get("id"))
cookies_table.update(set("last_path", ""), query.id == choose.get("id"))
if (database.get_last_site() == id):
database.set_last_path("")
database.set_last_site("")
def sites_editor():
def sites_editor(database: Database):
lst = []
for i in sites_table.all():
for i in database.get_sites():
lst.append(f"{str(i.get('id'))}: {i.get('name')}")
terminal_menu = TerminalMenu(lst, show_search_hint=True, preview_title="delete", preview_command=config_preview)
choose = terminal_menu.show()
choose = menu.site_editor(lst, database)
if (choose == None):
return (1)
site_name = lst[choose]
id = int(site_name.split(": ")[0])
site = sites_table.get(query.id == id)
site = database.get_site_by_id(id)
lst = [f"name: {site.get('name')}", f"url: {site.get('url')}", f"user: {site.get('user')}", f"password: {site.get('password')}"]
terminal_menu = TerminalMenu(lst)
choose = terminal_menu.show()
choose = menu.param_editor(lst)
if (choose == None):
return (1);
for i in lst:
@ -156,36 +79,32 @@ def sites_editor():
elif (choose == 3):
sites_table.update(set("password", input("password: ")), query.id == id)
def sites_navigator():
if (len(sites_table.all()) == 0):
add_site()
last_site_id = cookies_table.get(query.id == "0").get("last_site")
last_site = sites_table.get(query.id == last_site_id)
def sites_navigator(database: Database):
nb_site = database.get_sites_table_len()
if (nb_site == 0):
add_site(database)
nb_site = 1
last_site = database.get_last_site()
if (last_site != None):
set_url(last_site)
files_navigator()
files_navigator(last_site)
while True:
terminal_menu = TerminalMenu(get(sites_table.all(), "name") + ["", "add", "edit", "delete"], skip_empty_entries=True, show_search_hint=True)
choose = terminal_menu.show()
nb_site = database.get_sites_table_len()
choose = menu.sites(database.get_sites())
if (choose == None):
return (1)
if (choose == len(sites_table.all()) + 1):
add_site();
elif (choose == len(sites_table.all()) + 2):
sites_editor()
elif (choose == len(sites_table.all()) + 3):
sites_deleter()
sites_navigator()
if (choose == nb_site + 1):
add_site(database);
elif (choose == nb_site + 2):
sites_editor(database)
elif (choose == nb_site + 3):
sites_deleter(database)
sites_navigator(database)
return
else:
set_url(sites_table.all()[choose])
files_navigator()
files_navigator(database.get_sites()[choose])
db = TinyDB("./database.json", indent=4)
query = Query()
database = Database()
sites_table = db.table("sites")
cookies_table = db.table("cookies")
if (len(cookies_table.all()) == 0):
cookies_table.insert({"last_path": "", "last_site": "", "id": "0"})
sites_navigator();
if (database.get_cookies_table_len() == 0):
database.add_cookies({"last_path": "", "last_site": "", "last_cursor_pos": 0, "id": "0"})
sites_navigator(database);

98
menu.py Normal file
View File

@ -0,0 +1,98 @@
from database import Database
from scrapper import get_files, get_uri
from utils import get_url, get
import platform
from terminal_menu.Menu import Menu
# Module-level state shared with the preview callbacks: the menu library
# invokes them with only the highlighted entry as argument, so extra
# context has to travel through these globals.
uri = ""  # current directory path, read by files_preview (set in files())
preview_site = {}  # site record being browsed, read by files_preview
preview_database = ""  # Database used by config_preview; set by site_deleter/site_editor (NOTE(review): initialised to a str, later rebound to a Database instance)
def get_user_choice_by_menu(options: list, title = None, preview_command=None, preview_size: float = 0.3, show_search_hint: bool = False, skip_empty_entries = False, cursor_pos: int = 0, preview_title = None):
    """Display a terminal menu and return the index the user picked.

    Wraps terminal_menu.Menu. Returns whatever Menu.show() returns
    (presumably None when the user aborts -- TODO confirm against the lib).

    `show_search_hint` and `preview_title` are accepted for call-site
    compatibility but are NOT forwarded: the bundled Menu exposes no
    matching options here. Bug fix: `site_deleter` passes
    `preview_title="delete"`, which previously raised TypeError because
    this signature did not accept it.
    """
    menu = Menu(options,
                title,
                preview_body_function=preview_command,
                preview_ratio=preview_size,
                skip_empty_option=skip_empty_entries,
                cursor_pos=cursor_pos)
    return (menu.show())
def add_site(database: Database) -> dict:
    """Interactively prompt for a new site and return its record (dict).

    The record gets the next sequential id. A blank name defaults to the
    id rendered as a string, so 'name' is always a str -- matching the
    f-string formatting used by the site menus.
    """
    print("add a site:")
    site = {}
    site.update({"url": input("url without protocol (ip:port):")})
    site.update({"user": input("user(leave blank):")})
    site.update({"password": input("password(leave blank):")})
    site.update({"id": database.get_sites_table_len()})
    name = input(f"name[{str(database.get_sites_table_len())}]")
    if (name == ""):
        # Bug fix: the original stored the raw int here, making 'name'
        # an int on the blank-input path but a str everywhere else.
        name = str(database.get_sites_table_len())
    site.update({"name": name})
    return (site)
def files_preview(filename: str) -> str:
    """Preview callback for the file menu.

    Directories (names ending in "/") are listed one entry per line;
    plain files get no preview (None). Relies on the module globals
    `preview_site` and `uri` set by files().
    """
    if filename.endswith("/"):
        entries = get_files(preview_site, uri + filename)
        return "\n".join(entries)
    return None
def files(site: dict, path: str, database: Database, pos: int):
    """Show the directory-listing menu for *path* on *site*.

    Persists the cursor position after every interaction and returns the
    chosen entry name, or None when the user aborts.
    """
    global uri
    global preview_site
    # Expose state to files_preview, which the menu calls with no context.
    uri = path
    preview_site = site
    entries = get_files(site, path)
    choose = get_user_choice_by_menu(entries,
                                     preview_command=files_preview,
                                     preview_size=0.3,
                                     show_search_hint=True,
                                     title=f"Index of {path}",
                                     cursor_pos=pos)
    if choose is None:
        # Aborted: reset the saved cursor so the next browse starts at the top.
        database.set_last_cursor_pos(0)
        return None
    database.set_last_cursor_pos(choose)
    return entries[choose]
def config_preview(site_name: str):
    """Preview callback: render the details of a site entry.

    *site_name* is formatted as "<id>: <name>"; the numeric prefix is
    parsed to look the site up via the module-global `preview_database`.
    Returns None when the site no longer exists.
    """
    # Renamed locals: the original shadowed the built-ins `id` and `str`.
    site_id = int(site_name.split(": ")[0])
    site = preview_database.get_site_by_id(site_id)
    if (site == None):
        return
    text = f"""
url: {site.get('url')}
user: {site.get('user')}
password: {site.get('password')}
"""
    return (text)
def site_deleter(sites, database: Database):
    """Show the site-deletion menu; return the chosen index or None."""
    global preview_database
    # config_preview needs the database but is invoked with no extra args.
    preview_database = database
    # Bug fix: the original also passed preview_title="delete", a keyword
    # get_user_choice_by_menu does not accept, raising TypeError on entry.
    choose = get_user_choice_by_menu(sites,
                                     show_search_hint=True,
                                     preview_command=config_preview)
    return (choose)
def param_editor(lst: list):
    """Let the user pick one of the editable site parameters in *lst*;
    returns the chosen index or None on abort."""
    return get_user_choice_by_menu(lst)
def site_editor(lst: list, database: Database):
    """Show the site-edit menu.

    Returns the chosen index into *lst*, or None when the user aborts or
    selects the trailing "quit" entry.
    """
    global preview_database
    preview_database = database
    options = lst + ["", "quit"]
    choose = get_user_choice_by_menu(options,
                                     skip_empty_entries=True,
                                     show_search_hint=True,
                                     title="Edit")
    # "quit" sits after the empty separator, i.e. at index len(lst) + 1.
    if choose == len(lst) + 1:
        return None
    return choose
def sites(sites: list):
    """Show the top-level site menu plus the add/edit/delete actions.

    Returns the chosen index (site entries first, then a separator, then
    the three action entries) or None when the user aborts.
    """
    # Removed the unused `nb_site` local from the original.
    choose = get_user_choice_by_menu(get(sites, "name") + ["", "add", "edit", "delete"],
                                     skip_empty_entries=True,
                                     show_search_hint=True)
    return (choose)

31
player.py Normal file
View File

@ -0,0 +1,31 @@
from database import Database
import menu
from time import sleep
import os, subprocess
def _play(url: str, database: Database):
    """Play *url* with libvlc, resuming from the last recorded position.

    NOTE(review): this function is currently dead/broken code:
      - `vlc` is never imported in this module (NameError at vlc.Instance()),
      - `menu.start_pos` does not exist in menu.py,
      - the trailing `while True` loop never terminates.
    Kept byte-for-byte; `play` below is the variant actually called.
    """
    start_pos = 0
    viewing_data = database.get_viewing_data(url)
    # Offer to resume only when a previous viewing record exists for this URL.
    if (viewing_data != None):
        response = menu.start_pos([f"go back to {str(viewing_data.get('last_pos'))}", "restart from 0:00"])
        if (response == None):
            return (1);
        elif (response == 0):
            start_pos = viewing_data.get("last_pos")
    vlc_instance = vlc.Instance()
    player = vlc_instance.media_player_new()
    media = vlc_instance.media_new(url)
    player.set_media(media)
    player.play()
    # Give VLC a moment to load the media before querying/seeking it.
    sleep(1)
    player.video_set_mouse_input(True)
    # set_position takes a 0..1 ratio, hence the division by total length.
    player.set_position(start_pos / player.get_length())
    player.set_fullscreen(True)
    while True:
        print("duration:", player.get_time(),":",player.get_length())
def play(url: str, database: Database):
    """Launch VLC on *url* with its output silenced; blocks until VLC exits.

    *database* is accepted for interface parity with _play but is unused
    here. Raises subprocess.CalledProcessError on a non-zero VLC exit.
    """
    # Idiom: subprocess.DEVNULL replaces manually opening os.devnull.
    subprocess.check_call(['vlc', url],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.STDOUT)

View File

@ -1,12 +1,7 @@
beautifulsoup4==4.12.2
bs4==0.0.1
certifi==2023.5.7
charset-normalizer==3.1.0
enzyme==0.4.1
idna==3.4
install==1.3.5
getch==1.0
requests==2.30.0
simple-term-menu==1.6.1
soupsieve==2.4.1
tinydb==4.7.1
urllib3==2.0.2

31
scrapper.py Normal file
View File

@ -0,0 +1,31 @@
from bs4 import BeautifulSoup
from urllib.parse import quote, unquote
from utils import get_url
import requests
def get_files(site: dict, path: str) -> list:
    """Return the decoded link names listed by the HTTP index page at *path*.

    Builds the full URL from the site record, fetches it, and collects the
    href of every anchor. Exits the process when the server answers
    anything but 200.
    """
    # Bug fix: scrapper.py never imports `sys` at module level, so the
    # error path below raised NameError instead of exiting cleanly.
    import sys

    url = get_url(site) + quote(path)
    # NOTE(review): this guard can never match once the full "http://..."
    # URL is built; kept from the original for behavioral parity.
    if (url in ["/../", "../"]):
        return ([])
    response = requests.get(url)
    if (response.status_code != 200):
        print("connection:", response.reason)
        sys.exit(1)
    soup = BeautifulSoup(response.text, 'html.parser')
    return ([unquote(element["href"]) for element in soup.findAll("a")])
def get_uri(url: str) -> str:
    """Extract the current path from an HTTP index page's <h1> heading.

    The server renders "Index of <path>"; the first 9 characters
    ("Index of ") are stripped. Returns "" on any fetch/parse failure
    (connection error, missing <h1>, non-200 status).
    """
    if (url in ["/../", "../"]):
        # Returned "" for type consistency; the original returned [] here.
        return ("")
    try:
        response = requests.get(url)
        if (response.status_code != 200):
            print("connection:", response.reason)
            # Bug fix: the original called sys.exit(1) with `sys` never
            # imported, and its bare `except:` then swallowed the NameError
            # (and would have swallowed SystemExit too). Fail soft instead.
            return ("")
        soup = BeautifulSoup(response.text, 'html.parser')
        return (soup.find("h1").text[9:])
    except Exception:
        # Narrowed from a bare `except:`; still covers request errors and
        # a missing <h1> (AttributeError on .text).
        return ("")

1
terminal_menu Submodule

Submodule terminal_menu added at 5c07857c79

12
utils.py Normal file
View File

@ -0,0 +1,12 @@
def get_url(site: dict):
    """Build the base HTTP URL for *site*, embedding credentials when set.

    Credentials are omitted only when BOTH 'user' and 'password' are the
    empty string; otherwise they are inlined as http://user:password@host.
    """
    user = site.get("user")
    password = site.get("password")
    if user == "" and password == "":
        return f"http://{site.get('url')}"
    return f"http://{user}:{password}@{site.get('url')}"
def get(dictionnarys: list, key: str):
    """Return the value of *key* from each dict in *dictionnarys*.

    Missing keys yield None (dict.get semantics); order is preserved.
    """
    # Idiom: comprehension instead of the original append loop.
    return ([dictionnary.get(key) for dictionnary in dictionnarys])