Compare commits: 266f56f8e6 ... 06869df157 (2 commits)

Commits:
06869df157
80481868db
database.py

@@ -11,6 +11,12 @@ class Database():

     def get_viewing_data(self, url:str ):
         return (self.viewing_table.get(self.query.url == url))

+    def set_viewing_data(self, url:str, viewing_data: dict):
+        self.viewing_table.update(set(url, value), self.query.url == url)
+
+    def add_viewing_data(self, viewing_data):
+        self.viewing_table.insert(viewing_data)
+
     def get_sites(self):
         return (self.sites_table.all())
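The viewing_table / query calls in this hunk follow TinyDB's table API, so here is a minimal usage sketch under that assumption; the db.json path, table name, and sample URL are illustrative and not taken from this repository.

# Minimal sketch, assuming the Database class wraps TinyDB tables
# (suggested by the .get(query.url == url) / .insert() / .update() calls above).
from tinydb import TinyDB, Query

db = TinyDB("db.json")                    # illustrative path
viewing_table = db.table("viewing_data")  # illustrative table name
query = Query()

url = "http://example.com/shows/episode1.mkv"

# add_viewing_data equivalent: insert a fresh record
viewing_table.insert({"url": url, "finished": False})

# get_viewing_data equivalent: returns the matching record, or None
print(viewing_table.get(query.url == url))

# set_viewing_data-style update: change fields on the matching record
viewing_table.update({"finished": True}, query.url == url)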
main.py (6 changed lines)

@@ -3,7 +3,7 @@ from scrapper import get_uri, get_files
 from urllib.parse import quote, unquote
 import player
 from database import Database
-from utils import get_url
+import utils
 import menu

 def files_navigator(site: dict):
@@ -14,7 +14,7 @@ def files_navigator(site: dict):
     database.set_last_site(site.get("id"))
     while True:
         pos = database.get_last_cursor_pos()
-        path = get_uri(get_url(site) + path)
+        path = get_uri(utils.get_base_url(site) + path)
         database.set_last_path(path)
         file = menu.files(site, path, database, pos)
         if (file == None):
@@ -22,7 +22,7 @@ def files_navigator(site: dict):
         elif (file == "../" and path == "/"):
             return
         if (not file.endswith("/")):
-            player.play(get_url(site) + quote(path + file), database)
+            player.play(utils.get_base_url(site) + quote(path + file), database)
         else:
             path = path + file

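main.py now calls utils.get_base_url(site) instead of the old get_url(site) helper. The body of get_base_url is not part of this diff; the sketch below is purely hypothetical, with invented "address" and "port" keys, just to illustrate the call pattern.

# Hypothetical sketch only: utils.get_base_url is not shown in this diff,
# and the "address"/"port" keys are invented for illustration.
def get_base_url(site: dict) -> str:
    return "http://{}:{}".format(site["address"], site["port"])

site = {"address": "192.168.1.10", "port": 8080}
print(get_base_url(site) + "/shows/")  # -> http://192.168.1.10:8080/shows/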
menu.py (27 changed lines)

@@ -1,7 +1,10 @@
 from database import Database
-from scrapper import get_files, get_uri
-from utils import get_url, get
+from scrapper import get_files, get_uri, get_url
+import utils
+from utils import get
 import platform
+import urllib
+import colorama
 from terminal_menu.Menu import Menu


@@ -33,14 +36,32 @@ def files_preview(filename: str) -> str:
     files = get_files(preview_site, uri + filename)
     return ("\n".join(files))

+def get_files_formated_by_viewing_data(database: Database, files: list, site: dict, path: str):
+    out = []
+    current_url = utils.get_base_url(site) + urllib.parse.quote(path);
+    for file in files:
+        if file == "..":
+            out.append(file);
+            continue
+        url = current_url + urllib.parse.quote(file)
+        print (url)
+        viewing_data = database.get_viewing_data(url);
+        if (viewing_data == None) or (viewing_data.get("finished") == False):
+            out.append(colorama.Style.BRIGHT + file + colorama.Style.RESET_ALL)
+        else:
+            out.append(colorama.Fore.LIGHTBLACK_EX + file + colorama.Style.RESET_ALL);
+    return (out);
+
 def files(site:dict, path:str, database:Database, pos:int):
     global uri
     global preview_site

     uri = path
     preview_site = site
+    current_url = get_url(site, path);
     files = get_files(site, path)
-    choose = get_user_choice_by_menu(files,
+    files_displayed = get_files_formated_by_viewing_data(database, files, site, path)
+    choose = get_user_choice_by_menu(files_displayed,
         preview_command=files_preview,
         preview_size=0.3,
         show_search_hint=True,
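The new get_files_formated_by_viewing_data helper brightens unwatched entries and dims finished ones with colorama. Below is a small standalone sketch of that styling; the sample file list is made up, and colorama.init() is added here so the escape codes also render on Windows.

# Sketch of the colorama styling used above: unwatched entries bright,
# finished ones dimmed. Sample data is illustrative.
import colorama

colorama.init()

files = ["episode1.mkv", "episode2.mkv"]
finished = {"episode1.mkv"}

for name in files:
    if name in finished:
        print(colorama.Fore.LIGHTBLACK_EX + name + colorama.Style.RESET_ALL)
    else:
        print(colorama.Style.BRIGHT + name + colorama.Style.RESET_ALL)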
player.py

@@ -1,5 +1,6 @@
 from database import Database
 import menu
+import terminal_menu.Menu as terminal_menu
 from time import sleep
 import os, subprocess

@@ -26,6 +27,10 @@ def _play(url: str, database: Database):


 def play(url: str, database: Database):
+    viewing_data = database.get_viewing_data(url);
+    if (viewing_data == None):
+        viewing_data = {"url": url, "finished": True}
+        database.add_viewing_data(viewing_data);
     with open(os.devnull, 'wb') as devnull:
         subprocess.check_call(['vlc', url], stdout=devnull, stderr=subprocess.STDOUT)

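play() keeps the open(os.devnull, 'wb') pattern for silencing VLC's output. Since Python 3.3 the same effect is available through subprocess.DEVNULL; a minimal equivalent sketch (the 'vlc' command comes from the diff, the URL is illustrative):

# Equivalent output suppression using subprocess.DEVNULL instead of
# manually opening os.devnull; behaviour matches the diff's call.
import subprocess

url = "http://example.com/shows/episode1.mkv"  # illustrative URL
subprocess.check_call(["vlc", url],
                      stdout=subprocess.DEVNULL,
                      stderr=subprocess.STDOUT)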
scrapper.py

@@ -1,10 +1,14 @@
 from bs4 import BeautifulSoup
 from urllib.parse import quote, unquote
-from utils import get_url
+import utils
 import requests

+def get_url(site: dict, path: str):
+    url = utils.get_base_url(site) + quote(path)
+    return (url)
+
 def get_files(site:dict, path:str) -> []:
-    url = get_url(site) + quote(path)
+    url = get_url(site, path)
     if (url in ["/../", "../"]):
         return ([])
     response = requests.get(url)
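The new get_url builds request URLs by percent-encoding only the path. urllib.parse.quote encodes spaces and other unsafe characters but leaves "/" intact by default, which is what the quote(path) calls above rely on; a quick demonstration with a made-up path:

# quote() percent-encodes unsafe characters but keeps "/" by default,
# so the directory structure survives in the resulting URL.
from urllib.parse import quote

path = "/tv shows/season 1/episode 1.mkv"
print(quote(path))  # -> /tv%20shows/season%201/episode%201.mkv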