diff --git a/.gitignore b/.gitignore index 9efdd5c25..f81b9ac07 100644 --- a/.gitignore +++ b/.gitignore @@ -11,11 +11,11 @@ accounts/* MediaInfo/* Images/* Thumbnails/* -tanha/* +rclone/* +tokens/* list_drives.txt cookies.txt downloads -bot.session -user.session +bot.session* terabox.txt -rcl.conf \ No newline at end of file +rclone.conf diff --git a/bot/__init__.py b/bot/__init__.py index 2f58ea472..76c8f8bdb 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -1,435 +1,247 @@ -import sys from os import path as ospath -from os import remove as osremove -from os import environ -from time import time, sleep +from os import remove, environ +from time import time from socket import setdefaulttimeout -from asyncio import Lock +from asyncio import Lock, get_event_loop from logging import ( INFO, ERROR, Formatter, FileHandler, StreamHandler, - error, - warning, getLogger, basicConfig, ) -from threading import Thread -from subprocess import Popen, check_output -from subprocess import run as srun -from faulthandler import enable as faulthandler_enable +from datetime import datetime +from subprocess import Popen, run, check_output -from aria2p import API +from pytz import timezone +from aria2p import API as ariaAPI from aria2p import Client as ariaClient from dotenv import load_dotenv, dotenv_values from uvloop import install -from pymongo import MongoClient from tzlocal import get_localzone from pyrogram import Client as tgClient from pyrogram import enums from qbittorrentapi import Client as qbClient +from pymongo.server_api import ServerApi +from pymongo.mongo_client import MongoClient from apscheduler.schedulers.asyncio import AsyncIOScheduler -faulthandler_enable() +load_dotenv("config.env", override=True) + +LOG_FILE = "log.txt" +TORRENT_TIMEOUT = 1800 +DOWNLOAD_DIR = "/usr/src/app/downloads/" + + install() setdefaulttimeout(600) +botStartTime = time() +bot_loop = get_event_loop() + +getLogger("pyrogram").setLevel(ERROR) getLogger("pymongo").setLevel(ERROR) getLogger("httpx").setLevel(ERROR) -bot_start_time = time() class CustomFormatter(Formatter): + def formatTime(self, record, datefmt=None): + dt = datetime.fromtimestamp(record.created, tz=timezone("Asia/Dhaka")) + return dt.strftime(datefmt) if datefmt else dt.isoformat() + def format(self, record): return super().format(record).replace(record.levelname, record.levelname[:1]) formatter = CustomFormatter( - "[%(asctime)s] [%(levelname)s] - %(message)s", datefmt="%d-%b-%y %I:%M:%S %p" + "[%(asctime)s] [%(levelname)s] %(message)s | [%(module)s:%(lineno)d]", + datefmt="%d-%b %I:%M:%S %p", ) -file_handler = FileHandler("log.txt") +file_handler = FileHandler(LOG_FILE) file_handler.setFormatter(formatter) stream_handler = StreamHandler() stream_handler.setFormatter(formatter) basicConfig(handlers=[file_handler, stream_handler], level=INFO) - LOGGER = getLogger(__name__) -load_dotenv("config.env", override=True) - -Interval = [] -QbInterval = [] -QbTorrents = {} -GLOBAL_EXTENSION_FILTER = ["aria2", "!qB"] +Intervals = {"status": {}, "qb": "", "stopAll": False} +QbTorrents, DRIVES_NAMES, DRIVES_IDS, INDEX_URLS = {}, [], [], [] +GLOBAL_EXTENSION_FILTER = [ + "aria2", + "!qB", + "txt", + "jpg", + "jpeg", + "png", + "html", + "nfo", + "url", + "php", + "aspx", +] user_data = {} -extra_buttons = {} -list_drives_dict = {} -shorteners_list = [] -aria2_options = {} -qbit_options = {} -queued_dl = {} -queued_up = {} -non_queued_dl = set() -non_queued_up = set() -download_dict_lock = Lock() -status_reply_dict_lock = Lock() -queue_dict_lock = Lock() 
-qb_listener_lock = Lock() -status_reply_dict = {} -download_dict = {} - -BOT_TOKEN = environ.get("BOT_TOKEN", "") -if len(BOT_TOKEN) == 0: - error("BOT_TOKEN variable is missing! Exiting now") - sys.exit(1) - -bot_id = BOT_TOKEN.split(":", 1)[0] +aria2_options, qbit_options = {}, {} +queued_dl, queued_up = {}, {} +non_queued_dl, non_queued_up = set(), set() +multi_tags, status_dict, task_dict = set(), {}, {} +task_dict_lock = Lock() +queue_dict_lock, qb_listener_lock, cpu_eater_lock, subprocess_lock = ( + Lock(), + Lock(), + Lock(), + Lock(), +) +BOT_TOKEN = environ["BOT_TOKEN"] DATABASE_URL = environ.get("DATABASE_URL", "") -if len(DATABASE_URL) == 0: - DATABASE_URL = "" - -if DATABASE_URL: - conn = MongoClient(DATABASE_URL) - db = conn.luna - current_config = dict(dotenv_values("config.env")) - old_config = db.settings.deployConfig.find_one({"_id": bot_id}) - if old_config is None: - db.settings.deployConfig.replace_one( - {"_id": bot_id}, current_config, upsert=True - ) - else: - del old_config["_id"] - if old_config and old_config != current_config: - db.settings.deployConfig.replace_one( - {"_id": bot_id}, current_config, upsert=True - ) - elif config_dict := db.settings.config.find_one({"_id": bot_id}): - del config_dict["_id"] - for key, value in config_dict.items(): - environ[key] = str(value) - if pf_dict := db.settings.files.find_one({"_id": bot_id}): - del pf_dict["_id"] - for key, value in pf_dict.items(): - if value: - file_ = key.replace("__", ".") - with open(file_, "wb+") as f: - f.write(value) - if a2c_options := db.settings.aria2c.find_one({"_id": bot_id}): - del a2c_options["_id"] - aria2_options = a2c_options - if qbit_opt := db.settings.qbittorrent.find_one({"_id": bot_id}): - del qbit_opt["_id"] - qbit_options = qbit_opt - conn.close() - BOT_TOKEN = environ.get("BOT_TOKEN", "") - bot_id = BOT_TOKEN.split(":", 1)[0] - DATABASE_URL = environ.get("DATABASE_URL", "") -else: - config_dict = {} - -GROUPS_EMAIL = environ.get("GROUPS_EMAIL", "") -if len(GROUPS_EMAIL) != 0: - GROUPS_EMAIL = GROUPS_EMAIL.lower() - -OWNER_ID = environ.get("OWNER_ID", "") -if len(OWNER_ID) == 0: - error("OWNER_ID variable is missing! Exiting now") - sys.exit(1) -else: - OWNER_ID = int(OWNER_ID) - -TELEGRAM_API = environ.get("TELEGRAM_API", "") -if len(TELEGRAM_API) == 0: - error("TELEGRAM_API variable is missing! Exiting now") - sys.exit(1) -else: - TELEGRAM_API = int(TELEGRAM_API) - -TELEGRAM_HASH = environ.get("TELEGRAM_HASH", "") -if len(TELEGRAM_HASH) == 0: - error("TELEGRAM_HASH variable is missing! 
Exiting now") - sys.exit(1) - -GDRIVE_ID = environ.get("GDRIVE_ID", "") -if len(GDRIVE_ID) == 0: - GDRIVE_ID = "" - -METADATA_KEY = environ.get("METADATA_KEY", "") -if len(METADATA_KEY) == 0: - METADATA_KEY = "" - -RCLONE_PATH = environ.get("RCLONE_PATH", "") -if len(RCLONE_PATH) == 0: - RCLONE_PATH = "" - -ATTACHMENT_URL = environ.get("ATTACHMENT_URL", "") -if len(ATTACHMENT_URL) == 0: - ATTACHMENT_URL = "" - -RCLONE_FLAGS = environ.get("RCLONE_FLAGS", "") -if len(RCLONE_FLAGS) == 0: - RCLONE_FLAGS = "" +TELEGRAM_API = int(environ["TELEGRAM_API"]) +TELEGRAM_HASH = environ["TELEGRAM_HASH"] +OWNER_ID = int(environ["OWNER_ID"]) +bot_id = BOT_TOKEN.split(":")[0] -DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") -if DEFAULT_UPLOAD != "rc": - DEFAULT_UPLOAD = "gd" -EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") -if len(EXTENSION_FILTER) > 0: - fx = EXTENSION_FILTER.split() - for x in fx: - cleaned_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(cleaned_x.strip().lower()) - -IS_PREMIUM_USER = False -user = "" -USER_SESSION_STRING = environ.get("USER_SESSION_STRING", "") -if len(USER_SESSION_STRING) != 0: +def initialize_database(): try: - user = tgClient( - "user", - TELEGRAM_API, - TELEGRAM_HASH, - session_string=USER_SESSION_STRING, - workers=1000, - parse_mode=enums.ParseMode.HTML, - no_updates=True, - ).start() - IS_PREMIUM_USER = user.me.is_premium + conn = MongoClient(DATABASE_URL, server_api=ServerApi("1")) + db = conn.luna + current_config = dict(dotenv_values("config.env")) + old_config = db.settings.deployConfig.find_one({"_id": bot_id}) + if old_config is None: + db.settings.deployConfig.replace_one( + {"_id": bot_id}, current_config, upsert=True + ) + else: + del old_config["_id"] + if old_config and old_config != current_config: + db.settings.deployConfig.replace_one( + {"_id": bot_id}, current_config, upsert=True + ) + elif config_dict := db.settings.config.find_one({"_id": bot_id}): + del config_dict["_id"] + for key, value in config_dict.items(): + environ[key] = str(value) + if pf_dict := db.settings.files.find_one({"_id": bot_id}): + del pf_dict["_id"] + for key, value in pf_dict.items(): + if value: + file_ = key.replace("__", ".") + with open(file_, "wb+") as f: + f.write(value) + if a2c_options := db.settings.aria2c.find_one({"_id": bot_id}): + del a2c_options["_id"] + aria2_options.update(a2c_options) + if qbit_opt := db.settings.qbittorrent.find_one({"_id": bot_id}): + del qbit_opt["_id"] + qbit_options.update(qbit_opt) + conn.close() except Exception as e: - error(f"Failed making client from USER_SESSION_STRING : {e}") - user = "" - -MAX_SPLIT_SIZE = 4194304000 if IS_PREMIUM_USER else 2097152000 - -MEGA_EMAIL = environ.get("MEGA_EMAIL", "") -MEGA_PASSWORD = environ.get("MEGA_PASSWORD", "") -if len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0: - MEGA_EMAIL = "" - MEGA_PASSWORD = "" - -FILELION_API = environ.get("FILELION_API", "") -if len(FILELION_API) == 0: - FILELION_API = "" - -INDEX_URL = environ.get("INDEX_URL", "").rstrip("/") -if len(INDEX_URL) == 0: - INDEX_URL = "" - -SEARCH_API_LINK = environ.get("SEARCH_API_LINK", "").rstrip("/") -if len(SEARCH_API_LINK) == 0: - SEARCH_API_LINK = "" - -STREAMWISH_API = environ.get("STREAMWISH_API", "") -if len(STREAMWISH_API) == 0: - STREAMWISH_API = "" - -BOT_MAX_TASKS = environ.get("BOT_MAX_TASKS", "") -BOT_MAX_TASKS = int(BOT_MAX_TASKS) if BOT_MAX_TASKS.isdigit() else "" - -LEECH_LOG_ID = environ.get("LEECH_LOG_ID", "") -LEECH_LOG_ID = "" if len(LEECH_LOG_ID) == 0 else int(LEECH_LOG_ID) - -YT_DLP_OPTIONS = 
environ.get("YT_DLP_OPTIONS", "") -if len(YT_DLP_OPTIONS) == 0: - YT_DLP_OPTIONS = "" - -SEARCH_LIMIT = environ.get("SEARCH_LIMIT", "") -SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT) - -LEECH_DUMP_ID = environ.get("LEECH_DUMP_ID", "") -if len(LEECH_DUMP_ID) == 0: - LEECH_DUMP_ID = "" - -CMD_SUFFIX = environ.get("CMD_SUFFIX", "") - -TORRENT_TIMEOUT = environ.get("TORRENT_TIMEOUT", "") -TORRENT_TIMEOUT = 3000 if len(TORRENT_TIMEOUT) == 0 else int(TORRENT_TIMEOUT) - -QUEUE_ALL = environ.get("QUEUE_ALL", "") -QUEUE_ALL = "" if len(QUEUE_ALL) == 0 else int(QUEUE_ALL) - -QUEUE_DOWNLOAD = environ.get("QUEUE_DOWNLOAD", "") -QUEUE_DOWNLOAD = "" if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD) + LOGGER.error(f"Database ERROR: {e}") -QUEUE_UPLOAD = environ.get("QUEUE_UPLOAD", "") -QUEUE_UPLOAD = "" if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD) -STOP_DUPLICATE = environ.get("STOP_DUPLICATE", "") -STOP_DUPLICATE = STOP_DUPLICATE.lower() == "true" +initialize_database() -USE_SERVICE_ACCOUNTS = environ.get("USE_SERVICE_ACCOUNTS", "") -USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == "true" - -AS_DOCUMENT = environ.get("AS_DOCUMENT", "") -AS_DOCUMENT = AS_DOCUMENT.lower() == "true" - -SHOW_MEDIAINFO = environ.get("SHOW_MEDIAINFO", "") -SHOW_MEDIAINFO = SHOW_MEDIAINFO.lower() == "true" - -MEDIA_GROUP = environ.get("MEDIA_GROUP", "") -MEDIA_GROUP = MEDIA_GROUP.lower() == "true" - -BASE_URL = environ.get("BASE_URL", "").rstrip("/") -if len(BASE_URL) == 0: - warning("BASE_URL not provided!") - BASE_URL = "" - -UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "") -if len(UPSTREAM_REPO) == 0: - UPSTREAM_REPO = "" - -UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "") -if len(UPSTREAM_BRANCH) == 0: - UPSTREAM_BRANCH = "main" - -TORRENT_LIMIT = environ.get("TORRENT_LIMIT", "") -TORRENT_LIMIT = "" if len(TORRENT_LIMIT) == 0 else float(TORRENT_LIMIT) - -DIRECT_LIMIT = environ.get("DIRECT_LIMIT", "") -DIRECT_LIMIT = "" if len(DIRECT_LIMIT) == 0 else float(DIRECT_LIMIT) - -YTDLP_LIMIT = environ.get("YTDLP_LIMIT", "") -YTDLP_LIMIT = "" if len(YTDLP_LIMIT) == 0 else float(YTDLP_LIMIT) - -GDRIVE_LIMIT = environ.get("GDRIVE_LIMIT", "") -GDRIVE_LIMIT = "" if len(GDRIVE_LIMIT) == 0 else float(GDRIVE_LIMIT) +if not ospath.exists(".netrc"): + with open(".netrc", "w"): + pass -CLONE_LIMIT = environ.get("CLONE_LIMIT", "") -CLONE_LIMIT = "" if len(CLONE_LIMIT) == 0 else float(CLONE_LIMIT) -MEGA_LIMIT = environ.get("MEGA_LIMIT", "") -MEGA_LIMIT = "" if len(MEGA_LIMIT) == 0 else float(MEGA_LIMIT) +def init_user_client(): + user_session = environ.get("USER_SESSION_STRING", "") + if user_session: + LOGGER.info("Creating client from USER_SESSION_STRING") + try: + user = tgClient( + "user", + TELEGRAM_API, + TELEGRAM_HASH, + session_string=user_session, + parse_mode=enums.ParseMode.HTML, + no_updates=True, + ).start() + return user.me.is_premium, user + except Exception as e: + LOGGER.error(e) + return False, "" + return False, "" + + +IS_PREMIUM_USER, user = init_user_client() +MAX_SPLIT_SIZE = 4194304000 if IS_PREMIUM_USER else 2097152000 -LEECH_LIMIT = environ.get("LEECH_LIMIT", "") -LEECH_LIMIT = "" if len(LEECH_LIMIT) == 0 else float(LEECH_LIMIT) +DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") +if DEFAULT_UPLOAD != "rc": + DEFAULT_UPLOAD = "gd" -USER_MAX_TASKS = environ.get("USER_MAX_TASKS", "") -USER_MAX_TASKS = "" if len(USER_MAX_TASKS) == 0 else int(USER_MAX_TASKS) -PLAYLIST_LIMIT = environ.get("PLAYLIST_LIMIT", "") -PLAYLIST_LIMIT = "" if len(PLAYLIST_LIMIT) == 0 else int(PLAYLIST_LIMIT) 
+def load_user_data(): + AUTHORIZED_CHATS = environ.get("AUTHORIZED_CHATS", "") + if AUTHORIZED_CHATS: + for id_ in AUTHORIZED_CHATS.split(): + user_data[int(id_.strip())] = {"is_auth": True} -DELETE_LINKS = environ.get("DELETE_LINKS", "") -DELETE_LINKS = DELETE_LINKS.lower() == "true" + SUDO_USERS = environ.get("SUDO_USERS", "") + if SUDO_USERS: + for id_ in SUDO_USERS.split(): + user_data[int(id_.strip())] = {"is_sudo": True} -FSUB_IDS = environ.get("FSUB_IDS", "") -if len(FSUB_IDS) == 0: - FSUB_IDS = "" + EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") + if EXTENSION_FILTER: + for x in EXTENSION_FILTER.split(): + x = x.lstrip(".") + GLOBAL_EXTENSION_FILTER.append(x.strip().lower()) -MIRROR_LOG_ID = environ.get("MIRROR_LOG_ID", "") -if len(MIRROR_LOG_ID) == 0: - MIRROR_LOG_ID = "" -IMAGES = environ.get("IMAGES", "") -IMAGES = ( - IMAGES.replace("'", "") - .replace('"', "") - .replace("[", "") - .replace("]", "") - .replace(",", "") -).split() +load_user_data() -SET_COMMANDS = environ.get("SET_COMMANDS", "") -SET_COMMANDS = SET_COMMANDS.lower() == "true" +def get_env_int(key): + value = environ.get(key, None) + if value is None or value == "" or int(value) == 0: + return "" + return int(value) -TOKEN_TIMEOUT = environ.get("TOKEN_TIMEOUT", "") -TOKEN_TIMEOUT = int(TOKEN_TIMEOUT) if TOKEN_TIMEOUT.isdigit() else "" config_dict = { - "AS_DOCUMENT": AS_DOCUMENT, - "BASE_URL": BASE_URL, - "BOT_TOKEN": BOT_TOKEN, - "BOT_MAX_TASKS": BOT_MAX_TASKS, - "CMD_SUFFIX": CMD_SUFFIX, - "DATABASE_URL": DATABASE_URL, - "DELETE_LINKS": DELETE_LINKS, + "AS_DOCUMENT": environ.get("AS_DOCUMENT", "").lower() == "true", + "AUTHORIZED_CHATS": environ.get("AUTHORIZED_CHATS", ""), + "BASE_URL": environ.get("BASE_URL", "").rstrip("/"), + "CMD_SUFFIX": environ.get("CMD_SUFFIX", ""), "DEFAULT_UPLOAD": DEFAULT_UPLOAD, - "FILELION_API": FILELION_API, - "TORRENT_LIMIT": TORRENT_LIMIT, - "DIRECT_LIMIT": DIRECT_LIMIT, - "YTDLP_LIMIT": YTDLP_LIMIT, - "GDRIVE_LIMIT": GDRIVE_LIMIT, - "CLONE_LIMIT": CLONE_LIMIT, - "MEGA_LIMIT": MEGA_LIMIT, - "LEECH_LIMIT": LEECH_LIMIT, - "FSUB_IDS": FSUB_IDS, - "USER_MAX_TASKS": USER_MAX_TASKS, - "PLAYLIST_LIMIT": PLAYLIST_LIMIT, - "MIRROR_LOG_ID": MIRROR_LOG_ID, - "LEECH_DUMP_ID": LEECH_DUMP_ID, - "IMAGES": IMAGES, - "EXTENSION_FILTER": EXTENSION_FILTER, - "GDRIVE_ID": GDRIVE_ID, - "ATTACHMENT_URL": ATTACHMENT_URL, - "INDEX_URL": INDEX_URL, - "LEECH_LOG_ID": LEECH_LOG_ID, - "TOKEN_TIMEOUT": TOKEN_TIMEOUT, - "MEDIA_GROUP": MEDIA_GROUP, - "MEGA_EMAIL": MEGA_EMAIL, - "MEGA_PASSWORD": MEGA_PASSWORD, - "METADATA_KEY": METADATA_KEY, - "OWNER_ID": OWNER_ID, - "QUEUE_ALL": QUEUE_ALL, - "QUEUE_DOWNLOAD": QUEUE_DOWNLOAD, - "QUEUE_UPLOAD": QUEUE_UPLOAD, - "RCLONE_FLAGS": RCLONE_FLAGS, - "RCLONE_PATH": RCLONE_PATH, - "SEARCH_API_LINK": SEARCH_API_LINK, - "SEARCH_LIMIT": SEARCH_LIMIT, - "SET_COMMANDS": SET_COMMANDS, - "SHOW_MEDIAINFO": SHOW_MEDIAINFO, - "STOP_DUPLICATE": STOP_DUPLICATE, - "STREAMWISH_API": STREAMWISH_API, - "TELEGRAM_API": TELEGRAM_API, - "TELEGRAM_HASH": TELEGRAM_HASH, - "TORRENT_TIMEOUT": TORRENT_TIMEOUT, - "UPSTREAM_REPO": UPSTREAM_REPO, - "UPSTREAM_BRANCH": UPSTREAM_BRANCH, - "USER_SESSION_STRING": USER_SESSION_STRING, - "GROUPS_EMAIL": GROUPS_EMAIL, - "USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS, - "YT_DLP_OPTIONS": YT_DLP_OPTIONS, + "EXTENSION_FILTER": environ.get("EXTENSION_FILTER", ""), + "FSUB_IDS": environ.get("FSUB_IDS", ""), + "FILELION_API": environ.get("FILELION_API", ""), + "GDRIVE_ID": environ.get("GDRIVE_ID", ""), + "INDEX_URL": environ.get("INDEX_URL", 
"").rstrip("/"), + "IS_TEAM_DRIVE": environ.get("IS_TEAM_DRIVE", "").lower() == "true", + "LEECH_DUMP_CHAT": int(environ.get("LEECH_DUMP_CHAT", 0)), + "LOG_CHAT": int(environ.get("LOG_CHAT", 0)), + "MEGA_EMAIL": environ.get("MEGA_EMAIL", ""), + "MEGA_PASSWORD": environ.get("MEGA_PASSWORD", ""), + "PAID_CHAT_ID": environ.get("PAID_CHAT_ID", ""), + "PAID_CHAT_LINK": environ.get("PAID_CHAT_LINK", ""), + "QUEUE_ALL": get_env_int("QUEUE_ALL"), + "QUEUE_DOWNLOAD": get_env_int("QUEUE_DOWNLOAD"), + "QUEUE_UPLOAD": get_env_int("QUEUE_UPLOAD"), + "RCLONE_FLAGS": environ.get("RCLONE_FLAGS", ""), + "RCLONE_PATH": environ.get("RCLONE_PATH", ""), + "STOP_DUPLICATE": environ.get("STOP_DUPLICATE", "").lower() == "true", + "STREAMWISH_API": environ.get("STREAMWISH_API", ""), + "SUDO_USERS": environ.get("SUDO_USERS", ""), + "TOKEN_TIMEOUT": get_env_int("TOKEN_TIMEOUT"), + "UPSTREAM_BRANCH": environ.get("UPSTREAM_BRANCH", "main"), + "USER_SESSION_STRING": environ.get("USER_SESSION_STRING", ""), + "USE_SA": environ.get("USE_SA", "").lower() == "true", + "YT_DLP_OPTIONS": environ.get("YT_DLP_OPTIONS", ""), } -if GDRIVE_ID: - list_drives_dict["Main"] = {"drive_id": GDRIVE_ID, "index_link": INDEX_URL} - -if ospath.exists("list_drives.txt"): - with open("list_drives.txt", "r+") as f: - lines = f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - -if ospath.exists("buttons.txt"): - with open("buttons.txt", "r+") as f: - lines = f.readlines() - for line in lines: - temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - -if ospath.exists("shorteners.txt"): - with open("shorteners.txt", "r+") as f: - lines = f.readlines() - for line in lines: - temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append({"domain": temp[0], "api_key": temp[1]}) +if GDID := environ.get("GDRIVE_ID"): + DRIVES_NAMES.append("Main") + DRIVES_IDS.append(GDID) + INDEX_URLS.append(config_dict["INDEX_URL"]) PORT = environ.get("PORT") Popen( @@ -437,12 +249,12 @@ def format(self, record): shell=True, ) -srun(["xnox", "-d", "--profile=."], check=False) +run(["xnox", "-d", "--profile=."], check=False) if not ospath.exists(".netrc"): with open(".netrc", "w"): pass -srun(["chmod", "600", ".netrc"], check=False) -srun(["cp", ".netrc", "/root/.netrc"], check=False) +run(["chmod", "600", ".netrc"], check=False) +run(["cp", ".netrc", "/root/.netrc"], check=False) trackers = ( check_output( @@ -453,27 +265,25 @@ def format(self, record): .rstrip(",") ) with open("a2c.conf", "a+") as a: - if TORRENT_TIMEOUT is not None: - a.write(f"bt-stop-timeout={TORRENT_TIMEOUT}\n") + a.write(f"bt-stop-timeout={TORRENT_TIMEOUT}\n") a.write(f"bt-tracker=[{trackers}]") -srun(["xria", "--conf-path=/usr/src/app/a2c.conf"], check=False) +run(["xria", "--conf-path=/usr/src/app/a2c.conf"], check=False) if ospath.exists("accounts.zip"): if ospath.exists("accounts"): - srun(["rm", "-rf", "accounts"], check=False) - srun( + run(["rm", "-rf", "accounts"], check=False) + run( ["7z", "x", "-o.", "-bd", "-aoa", "accounts.zip", "accounts/*.json"], check=False, ) - srun(["chmod", "-R", "777", "accounts"], check=False) - osremove("accounts.zip") + run(["chmod", "-R", "777", "accounts"], check=False) + 
remove("accounts.zip") if not ospath.exists("accounts"): - config_dict["USE_SERVICE_ACCOUNTS"] = False -alive = Popen(["python3", "alive.py"]) -sleep(0.5) + config_dict["USE_SA"] = False -aria2 = API(ariaClient(host="http://localhost", port=6800, secret="")) +alive = Popen(["python3", "alive.py"]) +aria2 = ariaAPI(ariaClient(host="http://localhost", port=6800, secret="")) xnox_client = qbClient( host="localhost", @@ -487,23 +297,6 @@ def format(self, record): }, ) - -def aria2c_init(): - try: - link = "https://linuxmint.com/torrents/lmde-5-cinnamon-64bit.iso.torrent" - dire = "/usr/src/app/downloads/".rstrip("/") - aria2.add_uris([link], {"dir": dire}) - sleep(3) - downloads = aria2.get_downloads() - sleep(10) - aria2.remove(downloads, force=True, files=True, clean=True) - except Exception as e: - error(f"Aria2c initializing error: {e}") - - -Thread(target=aria2c_init).start() -sleep(1.5) - aria2c_global = [ "bt-max-open-files", "download-result", @@ -519,31 +312,34 @@ def aria2c_init(): "save-cookies", "server-stat-of", ] +aria2_options = aria2_options or aria2.client.get_global_option() +aria2.set_global_options( + {op: aria2_options[op] for op in aria2c_global if op in aria2_options} +) -if not aria2_options: - aria2_options = aria2.client.get_global_option() -else: - a2c_glo = {op: aria2_options[op] for op in aria2c_global if op in aria2_options} - aria2.set_global_options(a2c_glo) - -if not qbit_options: - qbit_options = dict(xnox_client.app_preferences()) - del qbit_options["listen_port"] - for k in list(qbit_options.keys()): - if k.startswith("rss"): - del qbit_options[k] -else: - qb_opt = {**qbit_options} - xnox_client.app_set_preferences(qb_opt) + +def get_qb_options(): + global qbit_options + if not qbit_options: + qbit_options = dict(xnox_client.app_preferences()) + del qbit_options["listen_port"] + for k in list(qbit_options.keys()): + if k.startswith("rss"): + del qbit_options[k] + else: + xnox_client.app_set_preferences(qbit_options) + + +get_qb_options() bot = tgClient( "bot", TELEGRAM_API, TELEGRAM_HASH, bot_token=BOT_TOKEN, - workers=1000, parse_mode=enums.ParseMode.HTML, ).start() bot_loop = bot.loop -bot_name = bot.me.username +bot_username = bot.me.username + scheduler = AsyncIOScheduler(timezone=str(get_localzone()), event_loop=bot_loop) diff --git a/bot/__main__.py b/bot/__main__.py index a2df4f7b0..19dc0413d 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,5 +1,3 @@ -# ruff: noqa: F401 -import contextlib from os import execl as osexecl from sys import executable from html import escape @@ -8,59 +6,64 @@ from signal import SIGINT, signal from asyncio import gather, create_subprocess_exec -from psutil import boot_time, disk_usage, cpu_percent, virtual_memory +from psutil import ( + boot_time, + cpu_count, + disk_usage, + cpu_percent, + swap_memory, + virtual_memory, + net_io_counters, +) from aiofiles import open as aiopen from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove from pyrogram.filters import regex, command from pyrogram.handlers import MessageHandler, CallbackQueryHandler from bot import ( LOGGER, - DATABASE_URL, - Interval, - QbInterval, + Intervals, bot, - bot_name, scheduler, user_data, config_dict, - bot_start_time, + bot_username, + botStartTime, ) -from .modules import ( - list, +from .modules import ( # noqa: F401 + exec, + help, clone, - count, shell, ytdlp, - delete, - images, status, - executor, + gd_count, authorize, broadcast, + gd_delete, + gd_search, mediainfo, - 
speedtest, + cancel_task, bot_settings, mirror_leech, - cancel_mirror, + file_selector, torrent_search, - torrent_select, users_settings, ) from .helper.ext_utils.bot_utils import ( + cmd_exec, new_task, - new_thread, - set_commands, sync_to_async, - get_readable_time, - get_readable_file_size, + create_help_buttons, ) -from .helper.ext_utils.db_handler import DbManager -from .helper.ext_utils.files_utils import clean_all, exit_clean_up, start_cleanup +from .helper.ext_utils.db_handler import Database +from .helper.ext_utils.files_utils import clean_all, exit_clean_up +from .helper.ext_utils.status_utils import get_readable_time, get_readable_file_size from .helper.telegram_helper.filters import CustomFilters from .helper.listeners.aria2_listener import start_aria2_listener +from .helper.ext_utils.telegraph_helper import telegraph from .helper.telegram_helper.bot_commands import BotCommands from .helper.telegram_helper.button_build import ButtonMaker from .helper.telegram_helper.message_utils import ( @@ -68,103 +71,58 @@ edit_message, send_message, delete_message, - one_minute_del, five_minute_del, ) -if config_dict["GDRIVE_ID"]: - help_string = f"""NOTE: Try each command without any arguments to see more details. - -
/{BotCommands.MirrorCommand[0]} - Start mirroring to Google Drive. -/{BotCommands.LeechCommand[0]} - Start leeching to Telegram. -/{BotCommands.YtdlCommand[0]} - Mirror links supported by yt-dlp. -/{BotCommands.YtdlLeechCommand[0]} - Leech links supported by yt-dlp. -/{BotCommands.CloneCommand[0]} - Copy files/folders to Google Drive. -/{BotCommands.CountCommand} - Count files/folders in Google Drive. -/{BotCommands.ListCommand} - Search in Google Drive(s). -/{BotCommands.UserSetCommand} - Open the settings panel. -/{BotCommands.MediaInfoCommand} - View MediaInfo from a file or link. -/{BotCommands.StopAllCommand[0]} - Cancel all active tasks. -/{BotCommands.SearchCommand} - Search for torrents using API or plugins. -/{BotCommands.StatusCommand[0]} - Show the status of all downloads. -/{BotCommands.StatsCommand[0]} - Display machine stats hosting the bot.-""" -else: - help_string = f"""NOTE: Try each command without any arguments to see more details. - -
/{BotCommands.LeechCommand[0]} - Start leeching to Telegram. -/{BotCommands.YtdlLeechCommand[0]} - Leech links supported by yt-dlp. -/{BotCommands.UserSetCommand} - Open the settings panel. -/{BotCommands.MediaInfoCommand} - View MediaInfo from a file or link. -/{BotCommands.StopAllCommand[0]} - Cancel all active tasks. -/{BotCommands.SearchCommand} - Search for torrents using API or plugins. -/{BotCommands.StatusCommand[0]} - Show the status of all downloads. -/{BotCommands.StatsCommand[0]} - Display machine stats hosting the bot.-""" - - -@new_thread + async def stats(_, message): + if await aiopath.exists(".git"): + last_commit = await cmd_exec( + "git log -1 --date=short --pretty=format:'%cd From %cr'", True + ) + last_commit = last_commit[0] + else: + last_commit = "No UPSTREAM_REPO" total, used, free, disk = disk_usage("/") + swap = swap_memory() memory = virtual_memory() - current_time = get_readable_time(time() - bot_start_time) - os_uptime = get_readable_time(time() - boot_time()) - cpu_usage = cpu_percent(interval=0.5) - limit_mapping = { - "Torrent": config_dict.get("TORRENT_LIMIT", "∞"), - "Gdrive": config_dict.get("GDRIVE_LIMIT", "∞"), - "Ytdlp": config_dict.get("YTDLP_LIMIT", "∞"), - "Direct": config_dict.get("DIRECT_LIMIT", "∞"), - "Leech": config_dict.get("LEECH_LIMIT", "∞"), - "Clone": config_dict.get("CLONE_LIMIT", "∞"), - "Mega": config_dict.get("MEGA_LIMIT", "∞"), - "User task": config_dict.get("USER_MAX_TASKS", "∞"), - } - system_info = ( - f"
• Bot uptime : {current_time}\n"
-        f"• Sys uptime : {os_uptime}\n"
-        f"• CPU usage : {cpu_usage}%\n"
-        f"• RAM usage : {memory.percent}%\n"
-        f"• Disk usage : {disk}%\n"
-        f"• Free space : {get_readable_file_size(free)}\n"
-        f"• Total space: {get_readable_file_size(total)}\n\n"
-    )
+    stats = (
+        f"Commit Date: {last_commit}\n\n"
+        f"Bot Uptime: {get_readable_time(time() - botStartTime)}\n"
+        f"OS Uptime: {get_readable_time(time() - boot_time())}\n\n"
+        f"Total Disk Space: {get_readable_file_size(total)}\n"
+        f"Used: {get_readable_file_size(used)} | Free: {get_readable_file_size(free)}\n\n"
+        f"Upload: {get_readable_file_size(net_io_counters().bytes_sent)}\n"
+        f"Download: {get_readable_file_size(net_io_counters().bytes_recv)}\n\n"
+        f"CPU: {cpu_percent(interval=0.5)}%\n"
+        f"RAM: {memory.percent}%\n"
+        f"DISK: {disk}%\n\n"
+        f"Physical Cores: {cpu_count(logical=False)}\n"
+        f"Total Cores: {cpu_count(logical=True)}\n\n"
+        f"SWAP: {get_readable_file_size(swap.total)} | Used: {swap.percent}%\n"
+        f"Memory Total: {get_readable_file_size(memory.total)}\n"
+        f"Memory Free: {get_readable_file_size(memory.available)}\n"
+        f"Memory Used: {get_readable_file_size(memory.used)}\n"
+    )
-
-    limitations = "LIMITATIONS\n\n"
-
-    for k, v in limit_mapping.items():
-        if v == "":
-            value = "∞"
-        elif k != "User task":
-            value = f"{v}GB/Link"
-        else:
-            value = f"{v} Tasks/user"
-        limitations += f"• {k:<11}: {value}\n"
-
-    stats = system_info + limitations
-    reply_message = await send_message(message, stats, photo="Random")
-    await delete_message(message)
-    await one_minute_del(reply_message)
+ await send_message(message, stats)
-@new_thread
async def start(client, message):
if len(message.command) > 1 and message.command[1] == "private":
await delete_message(message)
elif len(message.command) > 1 and len(message.command[1]) == 36:
userid = message.from_user.id
input_token = message.command[1]
- if DATABASE_URL:
- stored_token = await DbManager().get_user_token(userid)
- if stored_token is None:
- return await send_message(
- message,
- "This token is not for you!\n\nPlease generate your own.",
- )
- if input_token != stored_token:
- return await send_message(
- message, "Invalid token.\n\nPlease generate a new one."
- )
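+        # Compare the deep-link token with the one stored for this user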
+ stored_token = await Database().get_user_token(userid)
+ if stored_token is None:
+ return await send_message(
+ message,
+ "This token is not for you!\n\nPlease generate your own.",
+ )
+ if input_token != stored_token:
+ return await send_message(
+ message, "Invalid token.\n\nPlease generate a new one."
+ )
if userid not in user_data:
return await send_message(
message, "This token is not yours!\n\nKindly generate your own."
@@ -180,31 +138,36 @@ async def start(client, message):
data["token"] = token
data["time"] = token_time
user_data[userid].update(data)
- if DATABASE_URL:
- await DbManager().update_user_tdata(userid, token, token_time)
+ await Database().update_user_tdata(userid, token, token_time)
msg = "Your token has been successfully generated!\n\n"
msg += f'It will be valid for {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]), True)}'
return await send_message(message, msg)
elif await CustomFilters.authorized(client, message):
help_command = f"/{BotCommands.HelpCommand}"
start_string = f"This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram.\nType {help_command} to get a list of available commands"
- await send_message(message, start_string, photo="Random")
+ await send_message(message, start_string)
else:
- await send_message(message, "You are not a authorized user!", photo="Random")
- await DbManager().update_pm_users(message.from_user.id)
+ await send_message(message, "You are not a authorized user!")
+ await Database().update_pm_users(message.from_user.id)
return None
async def restart(_, message):
+ Intervals["stopAll"] = True
restart_message = await send_message(message, "Restarting...")
if scheduler.running:
scheduler.shutdown(wait=False)
- for interval in [QbInterval, Interval]:
- if interval:
- interval[0].cancel()
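+    # Cancel the qb poll task and any per-message status-update intervals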
+ if qb := Intervals["qb"]:
+ qb.cancel()
+ if st := Intervals["status"]:
+ for intvl in list(st.values()):
+ intvl.cancel()
await sync_to_async(clean_all)
proc1 = await create_subprocess_exec(
- "pkill", "-9", "-f", "-e", "gunicorn|xria|xnox|xtra|xone"
+ "pkill",
+ "-9",
+ "-f",
+ "gunicorn|xria|xnox|xtra|xone",
)
proc2 = await create_subprocess_exec("python3", "update.py")
await gather(proc1.wait(), proc2.wait())
@@ -215,14 +178,71 @@ async def restart(_, message):
async def ping(_, message):
start_time = int(round(time() * 1000))
- reply = await send_message(message, "Starting ping...")
+ reply = await send_message(message, "Starting Ping")
end_time = int(round(time() * 1000))
- value = end_time - start_time
- await edit_message(reply, f"{value} ms.")
+ await edit_message(reply, f"{end_time - start_time} ms")
@new_task
-async def aeon_callback(_, query):
+async def log(_, message):
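+    # Send log.txt along with a button that renders the log tail in chat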
+ buttons = ButtonMaker()
+ buttons.callback("Log display", f"aeon {message.from_user.id} logdisplay")
+ reply_message = await sendFile(message, "log.txt", buttons=buttons.menu(1))
+ await delete_message(message)
+ await five_minute_del(reply_message)
+
+
+help_string = f"""
+NOTE: Try each command without any argument to see more details.
+
+/{BotCommands.MirrorCommand[0]}: Start mirroring to the cloud.
+/{BotCommands.YtdlCommand[0]}: Mirror a yt-dlp supported link.
+/{BotCommands.LeechCommand[0]}: Start leeching to Telegram.
+/{BotCommands.YtdlLeechCommand[0]}: Leech a yt-dlp supported link.
+/{BotCommands.CloneCommand[0]} [drive_url]: Copy file/folder to Google Drive.
+/{BotCommands.CountCommand} [drive_url]: Count files/folders in Google Drive.
+/{BotCommands.UserSetCommand} [query]: User settings.
+/{BotCommands.CancelAllCommand} [query]: Cancel all [status] tasks.
+/{BotCommands.ListCommand} [query]: Search in Google Drive(s).
+/{BotCommands.SearchCommand} [query]: Search for torrents with API.
+/{BotCommands.StatusCommand[0]}: Show the status of all downloads.
+/{BotCommands.StatsCommand[0]}: Show stats of the machine hosting the bot."""
+
+
+async def bot_help(_, message):
+ await send_message(message, help_string)
+
+
+async def restart_notification():
+ if await aiopath.isfile(".restartmsg"):
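+        # Best effort: resolve the checked-out commit into a GitHub commit link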
+ cmd = r"""remote_url=$(git config --get remote.origin.url) &&
+ if echo "$remote_url" | grep -qE "github\.com[:/](.*)/(.*?)(\.git)?$"; then
+ last_commit=$(git log -1 --pretty=format:'%h') &&
+ commit_link="https://github.com/5hojib/Aeon-Alt/commit/$last_commit" &&
+ echo $commit_link;
+ else
+ echo "Failed to extract repository name and owner name from the remote URL.";
+ fi"""
+
+ result = await cmd_exec(cmd, True)
+
+ commit_link = result[0]
+
+ with open(".restartmsg") as f:
+ chat_id, msg_id = map(int, f)
+ try:
+ await bot.edit_message_text(
+ chat_id=chat_id,
+ message_id=msg_id,
+                text="Restarted Successfully!",
+ )
+ except Exception as e:
+ print(f"Failed to edit message: {e}")
+ await remove(".restartmsg")
+
+
+@new_task
+async def AeonCallback(_, query):
message = query.message
user_id = query.from_user.id
data = query.data.split()
@@ -231,7 +251,7 @@ async def aeon_callback(_, query):
if data[2] == "logdisplay":
await query.answer()
async with aiopen("log.txt") as f:
- log_file_lines = (await f.read()).splitlines()
+ logFileLines = (await f.read()).splitlines()
def parseline(line):
try:
@@ -239,19 +259,19 @@ def parseline(line):
except IndexError:
return line
- ind, log_lines = 1, ""
+ ind, Loglines = 1, ""
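+        # Walk the log backwards, collecting the newest lines up to ~3500 characters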
try:
- while len(log_lines) <= 3500:
- log_lines = parseline(log_file_lines[-ind]) + "\n" + log_lines
- if ind == len(log_file_lines):
+ while len(Loglines) <= 3500:
+ Loglines = parseline(logFileLines[-ind]) + "\n" + Loglines
+ if ind == len(logFileLines):
break
ind += 1
- start_line = "" - end_line = "" + startLine = "
" + endLine = "" btn = ButtonMaker() btn.callback("Close", f"aeon {user_id} close") reply_message = await send_message( - message, start_line + escape(log_lines) + end_line, btn.column(1) + message, startLine + escape(Loglines) + endLine, btn.menu(1) ) await query.edit_message_reply_markup(None) await delete_message(message) @@ -259,7 +279,7 @@ def parseline(line): except Exception as err: LOGGER.error(f"TG Log Display : {err!s}") elif data[2] == "private": - await query.answer(url=f"https://t.me/{bot_name}?start=private") + await query.answer(url=f"https://t.me/{bot_username}?start=private") return None else: await query.answer() @@ -267,41 +287,17 @@ def parseline(line): return None -@new_task -async def log(_, message): - buttons = ButtonMaker() - buttons.callback("Log display", f"aeon {message.from_user.id} logdisplay") - reply_message = await sendFile(message, "log.txt", buttons=buttons.column(1)) - await delete_message(message) - await five_minute_del(reply_message) - - -@new_task -async def bot_help(_, message): - reply_message = await send_message(message, help_string) - await delete_message(message) - await one_minute_del(reply_message) - - -async def restart_notification(): - if await aiopath.isfile(".restartmsg"): - with open(".restartmsg") as f: - chat_id, msg_id = map(int, f) - with contextlib.suppress(Exception): - await bot.edit_message_text( - chat_id=chat_id, message_id=msg_id, text="Restarted Successfully!" - ) - await aioremove(".restartmsg") - - async def main(): + await Database().db_load() await gather( - start_cleanup(), + sync_to_async(clean_all), torrent_search.initiate_search_tools(), restart_notification(), - set_commands(bot), + telegraph.create_account(), + sync_to_async(start_aria2_listener, wait=False), ) - await sync_to_async(start_aria2_listener, wait=False) + create_help_buttons() + bot.add_handler(MessageHandler(start, filters=command(BotCommands.StartCommand))) bot.add_handler( MessageHandler( @@ -330,7 +326,7 @@ async def main(): filters=command(BotCommands.StatsCommand) & CustomFilters.authorized, ) ) - bot.add_handler(CallbackQueryHandler(aeon_callback, filters=regex(r"^aeon"))) + bot.add_handler(CallbackQueryHandler(AeonCallback, filters=regex(r"^aeon"))) LOGGER.info("Bot Started!") signal(SIGINT, exit_clean_up) diff --git a/bot/helper/aeon_utils/access_check.py b/bot/helper/aeon_utils/access_check.py new file mode 100644 index 000000000..3b598c5d3 --- /dev/null +++ b/bot/helper/aeon_utils/access_check.py @@ -0,0 +1,194 @@ +from re import IGNORECASE, escape, search +from time import time +from uuid import uuid4 + +from pyrogram.errors import RPCError, PeerIdInvalid, UserNotParticipant + +from bot import ( + LOGGER, + OWNER_ID, + bot, + user_data, + config_dict, + bot_username, +) +from bot.helper.ext_utils.db_handler import Database +from bot.helper.aeon_utils.shorteners import short +from bot.helper.ext_utils.status_utils import get_readable_time +from bot.helper.ext_utils.help_messages import nsfw_keywords +from bot.helper.telegram_helper.button_build import ButtonMaker + + +async def error_check(message): + msg, button = [], None + user_id = message.from_user.id + token_timeout = config_dict["TOKEN_TIMEOUT"] + + if message.chat.type != message.chat.type.BOT: + if FSUB_IDS := config_dict["FSUB_IDS"]: + join_button = {} + for channel_id in FSUB_IDS.split(): + chat = await get_chat_info(int(channel_id)) + if not chat: + continue + + try: + await chat.get_member(message.from_user.id) + except UserNotParticipant: + invite_link = ( + 
f"https://t.me/{chat.username}" + if chat.username + else chat.invite_link + ) + join_button[chat.title] = invite_link + except RPCError as e: + LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") + except Exception as e: + LOGGER.error(f"{e} for {channel_id}") + + if join_button: + button = button or ButtonMaker() + for title, link in join_button.items(): + button.url(f"Join {title}", link, "footer") + msg.append("You haven't joined our channel/group yet!") + + if not token_timeout or user_id in { + OWNER_ID, + user_data.get(user_id, {}).get("is_sudo"), + }: + try: + temp_msg = await message._client.send_message( + chat_id=user_id, text="Checking Access..." + ) + await temp_msg.delete() + except Exception: + button = button or ButtonMaker() + button.callback("Start", f"aeon {user_id} private", "header") + msg.append("You haven't initiated the bot in a private message!") + + if user_id not in { + OWNER_ID, + 1781717085, + user_data.get(user_id, {}).get("is_sudo"), + }: + token_msg, button = await token_check(user_id, button) + if token_msg: + msg.append(token_msg) + + if await nsfw_precheck(message): + msg.append("NSFW detected") + + if msg: + username = message.from_user.username + tag = f"@{username}" if username else message.from_user.mention + final_msg = f"Hey, {tag}!\n" + for i, m in enumerate(msg, 1): + final_msg += f"\n
{i}: {m}" + + if button: + button = button.menu(2) + return final_msg, button + + return None, None + + +async def get_chat_info(channel_id): + try: + return await bot.get_chat(channel_id) + except PeerIdInvalid as e: + LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") + return None + + +def is_nsfw(text): + pattern = ( + r"(?:^|\W|_)(?:" + + "|".join(escape(keyword) for keyword in nsfw_keywords) + + r")(?:$|\W|_)" + ) + return bool(search(pattern, text, flags=IGNORECASE)) + + +def is_nsfw_data(data): + if isinstance(data, list): + return any( + is_nsfw(item.get("name", "")) + if isinstance(item, dict) + else is_nsfw(item) + for item in data + ) + if isinstance(data, dict): + return any(is_nsfw(item["filename"]) for item in data.get("contents", [])) + return False + + +async def nsfw_precheck(message): + if is_nsfw(message.text): + return True + + reply_to = message.reply_to_message + if not reply_to: + return False + + for attr in ["document", "video"]: + if hasattr(reply_to, attr) and getattr(reply_to, attr): + file_name = getattr(reply_to, attr).file_name + if file_name and is_nsfw(file_name): + return True + + return any( + is_nsfw(getattr(reply_to, attr)) + for attr in ["caption", "text"] + if hasattr(reply_to, attr) and getattr(reply_to, attr) + ) + + +async def check_is_paid(chat, uid): + try: + await chat.get_member(uid) + return True + except UserNotParticipant: + return False + except Exception as e: + LOGGER.error(f"{e} for {chat.id}") + return False + + +async def is_paid(user_id): + if chat := await get_chat_info(int(config_dict["PAID_CHAT_ID"])): + return await check_is_paid(chat, user_id) + return True + + +async def token_check(user_id, button=None): + token_timeout = config_dict["TOKEN_TIMEOUT"] + if not token_timeout or user_id == OWNER_ID: + return None, button + if config_dict["PAID_CHAT_ID"] and await is_paid(user_id): + return None, button + + user_data.setdefault(user_id, {}) + data = user_data[user_id] + data["time"] = await Database().get_token_expiry(user_id) + expire = data.get("time") + isExpired = expire is None or (time() - expire) > token_timeout + if isExpired: + token = data["token"] if expire is None and "token" in data else str(uuid4()) + if expire is not None: + del data["time"] + data["token"] = token + await Database().update_user_token(user_id, token) + user_data[user_id] = data + + time_str = get_readable_time(token_timeout, True) + button = button or ButtonMaker() + short_link = await short(f"https://telegram.me/{bot_username}?start={token}") + button.url("Collect token", short_link) + msg = "Your token has expired, please collect a new token" + if config_dict["PAID_CHAT_ID"] and config_dict["PAID_CHAT_LINK"]: + msg += " or subscribe to the paid channel for no token." 
+ button.url("Subscribe", config_dict["PAID_CHAT_LINK"]) + + return (msg + f"\nIt will expire after {time_str}!"), button + + return None, button diff --git a/bot/helper/aeon_utils/caption_gen.py b/bot/helper/aeon_utils/caption_gen.py new file mode 100644 index 000000000..c4e98638f --- /dev/null +++ b/bot/helper/aeon_utils/caption_gen.py @@ -0,0 +1,123 @@ +import os +from hashlib import md5 +from contextlib import suppress + +from langcodes import Language +from aiofiles.os import path as aiopath + +from bot import LOGGER +from bot.helper.ext_utils.bot_utils import cmd_exec +from bot.helper.ext_utils.status_utils import ( + get_readable_time, + get_readable_file_size, +) + + +class DefaultDict(dict): + def __missing__(self, key): + return "Unknown" + + +async def generate_caption(file, dirpath, lcaption): + up_path = os.path.join(dirpath, file) + + try: + result = await cmd_exec( + [ + "ffprobe", + "-hide_banner", + "-loglevel", + "error", + "-print_format", + "json", + "-show_format", + "-show_streams", + up_path, + ] + ) + if result[1]: + LOGGER.info(f"Get Media Info: {result[1]}") + + ffresult = eval(result[0]) + except Exception as e: + LOGGER.error(f"Media Info: {e}. Mostly File not found!") + return file + + format_info = ffresult.get("format") + if not format_info: + LOGGER.error(f"Media Info Sections: {result}") + return file + + duration = round(float(format_info.get("duration", 0))) + lang, stitles, qual = "", "", "" + + streams = ffresult.get("streams", []) + if streams and streams[0].get("codec_type") == "video": + qual = get_video_quality(streams[0].get("height")) + + for stream in streams: + if stream.get("codec_type") == "audio": + lang = update_language(lang, stream) + if stream.get("codec_type") == "subtitle": + stitles = update_subtitles(stitles, stream) + + lang = lang[:-2] if lang else "Unknown" + stitles = stitles[:-2] if stitles else "Unknown" + qual = qual if qual else "Unknown" + md5_hex = calculate_md5(up_path) + + caption_dict = DefaultDict( + filename=file, + size=get_readable_file_size(await aiopath.getsize(up_path)), + duration=get_readable_time(duration, True), + quality=qual, + audios=lang, + subtitles=stitles, + md5_hash=md5_hex, + ) + + return lcaption.format_map(caption_dict) + + +def get_video_quality(height): + quality_map = { + 480: "480p", + 540: "540p", + 720: "720p", + 1080: "1080p", + 2160: "2160p", + 4320: "4320p", + 8640: "8640p", + } + for h, q in sorted(quality_map.items()): + if height <= h: + return q + return "Unknown" + + +def update_language(lang, stream): + language_code = stream.get("tags", {}).get("language") + if language_code: + with suppress(Exception): + language_name = Language.get(language_code).display_name() + if language_name not in lang: + lang += f"{language_name}, " + return lang + + +def update_subtitles(stitles, stream): + subtitle_code = stream.get("tags", {}).get("language") + if subtitle_code: + with suppress(Exception): + subtitle_name = Language.get(subtitle_code).display_name() + if subtitle_name not in stitles: + stitles += f"{subtitle_name}, " + return stitles + + +def calculate_md5(filepath): + hash_md5 = md5() + with open(filepath, "rb") as f: + for byte_block in iter(lambda: f.read(4096), b""): + hash_md5.update(byte_block) + return hash_md5.hexdigest() diff --git a/bot/helper/aeon_utils/gen_mediainfo.py b/bot/helper/aeon_utils/gen_mediainfo.py new file mode 100644 index 000000000..afd7f9871 --- /dev/null +++ b/bot/helper/aeon_utils/gen_mediainfo.py @@ -0,0 +1,19 @@ +section_dict = {"General", "Video", "Audio", 
"Text", "Image"} + + +def parseinfo(out): + tc = "" + skip = False + for line in out.split("\n"): + if line.startswith("Menu"): + skip = True + elif any(line.startswith(section) for section in section_dict): + skip = False + if not line.startswith("General"): + tc += "
{line.replace('Text', 'Subtitle')}
" + if not skip: + key, sep, value = line.partition(":") + tc += f"{key.strip():<28}{sep} {value.strip()}\n" + tc += "
{progress_bar(download.progress())}
{download.progress()}" - msg += f"\n{download.processed_bytes()} of {download.size()}" - msg += f"\nSpeed: {download.speed()}" - msg += f"\nEstimated: {download.eta()}" - if hasattr(download, "seeders_num"): - with contextlib.suppress(Exception): - msg += f"\nSeeders: {download.seeders_num()} | Leechers: {download.leechers_num()}" - elif download.status() == MirrorStatus.STATUS_SEEDING: - msg += f"Size: {download.size()}" - msg += f"\nSpeed: {download.upload_speed()}" - msg += f"\nUploaded: {download.uploaded_bytes()}" - msg += f"\nRatio: {download.ratio()}" - msg += f"\nTime: {download.seeding_time()}" - else: - msg += f"\n\n" - - if config_dict["MIRROR_LOG_ID"]: - await sendMultiMessage(config_dict["MIRROR_LOG_ID"], msg, button) - if self.linkslogmsg: - await delete_message(self.linkslogmsg) - await send_message(self.botpmmsg, msg, button, "Random") - await delete_message(self.botpmmsg) - if self.isSuperGroup: - await send_message( - self.message, - f"{msg} Links has been sent to your inbox", - inboxButton.column(1), - ) - else: - await delete_message(self.botpmmsg) - if self.seed: - if self.newDir: - await clean_target(self.newDir) - elif self.compress: - await clean_target(f"{self.dir}/{name}") - async with queue_dict_lock: - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - await start_from_queued() - return - - await clean_download(self.dir) - async with download_dict_lock: - if self.uid in download_dict: - del download_dict[self.uid] - count = len(download_dict) - if count == 0: - await self.clean() - else: - await update_all_messages() - - async with queue_dict_lock: - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - - await start_from_queued() - await delete_links(self.message) - - async def onDownloadError(self, error, button=None): - async with download_dict_lock: - if self.uid in download_dict: - del download_dict[self.uid] - count = len(download_dict) - if self.same_dir and self.uid in self.same_dir["tasks"]: - self.same_dir["tasks"].remove(self.uid) - self.same_dir["total"] -= 1 - msg = f"Hey, {self.tag}!\n" - msg += "Your download has been stopped!\n\n" - msg += f"Size: {download.size()}" - msg += f"\nElapsed: {get_readable_time(time() - download.message.date.timestamp())}" - msg += f"\n/stop_{download.gid()[:8]}\n\n" - if len(msg) == 0: - return None, None - if tasks > STATUS_LIMIT: - buttons = ButtonMaker() - buttons.callback("Prev", "status pre") - buttons.callback(f"{PAGE_NO}/{PAGES}", "status ref") - buttons.callback("Next", "status nex") - button = buttons.column(3) - msg += f"• Tasks: {tasks}{bmax_task}" - msg += f"\n• Bot uptime: {current_time}" - msg += f"\n• Free disk space: {get_readable_file_size(disk_usage('/usr/src/app/downloads/').free)}" - return msg, button - - -def text_to_bytes(size_text): - size_text = size_text.lower() - multiplier = { - "k": 1024, - "m": 1048576, - "g": 1073741824, - "t": 1099511627776, - "p": 1125899906842624, - } - for unit, factor in multiplier.items(): - if unit in size_text: - size_value = float(size_text.split(unit)[0]) - return size_value * factor - return 0 - - -async def turn_page(data): - global STATUS_START, PAGE_NO # noqa: PLW0603 - async with download_dict_lock: - if data[1] == "nex": - if PAGE_NO == PAGES: - STATUS_START = 0 - PAGE_NO = 1 - else: - STATUS_START += STATUS_LIMIT - PAGE_NO += 1 - elif data[1] == "pre": - if PAGE_NO == 1: - STATUS_START = STATUS_LIMIT * (PAGES - 1) - PAGE_NO = PAGES - else: - STATUS_START -= STATUS_LIMIT - PAGE_NO -= 1 - - -def get_readable_time(seconds, 
full_time=False): - periods = [ - ("millennium", 31536000000), - ("century", 3153600000), - ("decade", 315360000), - ("year", 31536000), - ("month", 2592000), - ("week", 604800), - ("day", 86400), - ("hour", 3600), - ("minute", 60), - ("second", 1), - ] - result = "" - for period_name, period_seconds in periods: - if seconds >= period_seconds: - period_value, seconds = divmod(seconds, period_seconds) - plural_suffix = "s" if period_value > 1 else "" - result += f"{int(period_value)} {period_name}{plural_suffix} " - if not full_time: - break - return result.strip() - - -def is_magnet(url): - return bool(re_match(MAGNET_REGEX, url)) - - -def is_url(url): - return bool(re_match(URL_REGEX, url)) - - -def is_gdrive_link(url): - return "drive.google.com" in url - - -def is_telegram_link(url): - return url.startswith(("https://t.me/", "tg://openmessage?user_id=")) - - -def is_share_link(url): - domain = urlparse(url).hostname - return any(x in domain for x in ["appdirve", "hubdrive", "gdflix", "filepress"]) - - -def is_mega_link(url): - return "mega.nz" in url or "mega.co.nz" in url - - -def is_rclone_path(path): - return bool( - re_match( - r"^(mrcc:)?(?!magnet:)(?![- ])[a-zA-Z0-9_\. -]+(? token_timeout - ) - if is_expired: - token = data["token"] if expire is None and "token" in data else str(uuid4()) - if expire is not None: - del data["time"] - data["token"] = token - if DATABASE_URL: - await DbManager().update_user_token(user_id, token) - user_data[user_id].update(data) - time_str = get_readable_time(token_timeout, True) - if button is None: - button = ButtonMaker() - button.url( - "Collect token", - tinyfy(short_url(f"https://telegram.me/{bot_name}?start={token}")), - ) - return ( - f"Your token has expired, please collect a new token.\nIt will expire after {time_str}!", - button, - ) - return None, button - - -def extra_btns(buttons): - if extra_buttons: - for btn_name, btn_url in extra_buttons.items(): - buttons.url(btn_name, btn_url) - return buttons - - -commands = [ - BotCommand( - getattr(BotCommands, cmd)[0] - if isinstance(getattr(BotCommands, cmd), list) - else getattr(BotCommands, cmd), - command_descriptions[cmd], - ) - for cmd in commands -] - - -async def set_commands(bot): - if config_dict["SET_COMMANDS"]: - await bot.set_bot_commands(commands) diff --git a/bot/helper/ext_utils/bulk_links.py b/bot/helper/ext_utils/bulk_links.py index 081633ab4..69afc35f7 100644 --- a/bot/helper/ext_utils/bulk_links.py +++ b/bot/helper/ext_utils/bulk_links.py @@ -2,51 +2,40 @@ from aiofiles.os import remove -async def get_links_from_message(text, bulk_start, bulk_end): - links_list = text.split("\n") - links_list = [item.strip() for item in links_list if len(item) != 0] - +def filterLinks(links_list: list, bulk_start: int, bulk_end: int) -> list: if bulk_start != 0 and bulk_end != 0: links_list = links_list[bulk_start:bulk_end] elif bulk_start != 0: links_list = links_list[bulk_start:] elif bulk_end != 0: links_list = links_list[:bulk_end] - return links_list -async def get_links_from_file(message, bulk_start, bulk_end): +def getLinksFromMessage(text: str) -> list: + links_list = text.split("\n") + return [item.strip() for item in links_list if len(item) != 0] + + +async def getLinksFromFile(message) -> list: links_list = [] text_file_dir = await message.download() - async with aiopen(text_file_dir, "r+") as f: lines = await f.readlines() links_list.extend(line.strip() for line in lines if len(line) != 0) - - if bulk_start != 0 and bulk_end != 0: - links_list = links_list[bulk_start:bulk_end] - 
elif bulk_start != 0: - links_list = links_list[bulk_start:] - elif bulk_end != 0: - links_list = links_list[:bulk_end] - await remove(text_file_dir) - return links_list -async def extract_bulk_links(message, bulk_start, bulk_end): +async def extractBulkLinks(message, bulk_start: str, bulk_end: str) -> list: bulk_start = int(bulk_start) bulk_end = int(bulk_end) - if ( - (reply_to := message.reply_to_message) - and (file_ := reply_to.document) - and (file_.mime_type == "text/plain") - ): - return await get_links_from_file( - message.reply_to_message, bulk_start, bulk_end - ) - if text := message.reply_to_message.text: - return await get_links_from_message(text, bulk_start, bulk_end) - return [] + links_list = [] + if reply_to := message.reply_to_message: + if (file_ := reply_to.document) and (file_.mime_type == "text/plain"): + links_list = await getLinksFromFile(reply_to) + elif text := reply_to.text: + links_list = getLinksFromMessage(text) + return ( + filterLinks(links_list, bulk_start, bulk_end) if links_list else links_list + ) diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py index 5b497b29d..3ef76323a 100644 --- a/bot/helper/ext_utils/db_handler.py +++ b/bot/helper/ext_utils/db_handler.py @@ -16,7 +16,7 @@ ) -class DbManager: +class Database: def __init__(self): self.__err = False self.__db = None @@ -51,7 +51,7 @@ async def db_load(self): uid = row["_id"] del row["_id"] thumb_path = f"Thumbnails/{uid}.jpg" - rclone_path = f"tanha/{uid}.conf" + rclone_path = f"rclone/{uid}.conf" if row.get("thumb"): if not await aiopath.exists("Thumbnails"): await makedirs("Thumbnails") @@ -59,8 +59,8 @@ async def db_load(self): await f.write(row["thumb"]) row["thumb"] = thumb_path if row.get("rclone"): - if not await aiopath.exists("tanha"): - await makedirs("tanha") + if not await aiopath.exists("rclone"): + await makedirs("rclone") async with aiopen(rclone_path, "wb+") as f: await f.write(row["rclone"]) row["rclone"] = rclone_path @@ -100,11 +100,13 @@ async def update_private_file(self, path): async def update_user_data(self, user_id): if self.__err: return - data = user_data[user_id] + data = user_data.get(user_id, {}) if data.get("thumb"): del data["thumb"] - if data.get("rclone"): - del data["rclone"] + if data.get("rclone_config"): + del data["rclone_config"] + if data.get("token_pickle"): + del data["token_pickle"] if data.get("token"): del data["token"] if data.get("time"): @@ -192,5 +194,4 @@ async def delete_all_access_tokens(self): self.__conn.close -if DATABASE_URL: - bot_loop.run_until_complete(DbManager().db_load()) +bot_loop.run_until_complete(Database().db_load()) diff --git a/bot/helper/ext_utils/exceptions.py b/bot/helper/ext_utils/exceptions.py index fa7b37c79..f64f31532 100644 --- a/bot/helper/ext_utils/exceptions.py +++ b/bot/helper/ext_utils/exceptions.py @@ -1,10 +1,16 @@ -class DirectDownloadLinkError(Exception): +class DirectDownloadLinkException(Exception): + """Not method found for extracting direct download link from the http link""" + pass -class ExtractionArchiveError(Exception): +class NotSupportedExtractionArchive(Exception): + """The archive format use is trying to extract is not supported""" + pass -class TgLinkError(Exception): +class TgLinkException(Exception): + """No Access granted for this chat""" + pass diff --git a/bot/helper/ext_utils/files_utils.py b/bot/helper/ext_utils/files_utils.py index d8017dbf5..c70f000b2 100644 --- a/bot/helper/ext_utils/files_utils.py +++ b/bot/helper/ext_utils/files_utils.py @@ -1,52 +1,23 @@ 
-import contextlib from os import path as ospath -from os import walk -from re import IGNORECASE -from re import sub as re_sub +from os import walk, makedirs +from re import IGNORECASE, escape from re import split as re_split from re import search as re_search from sys import exit as sexit -from time import time, gmtime, strftime -from shlex import split as ssplit -from shutil import rmtree, disk_usage -from asyncio import gather, create_task, create_subprocess_exec -from hashlib import md5 -from subprocess import run as srun -from asyncio.subprocess import PIPE +from shutil import rmtree +from contextlib import suppress +from subprocess import run from magic import Magic -from natsort import natsorted from aioshutil import rmtree as aiormtree -from langcodes import Language -from telegraph import upload_file from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, rmdir, listdir, makedirs -from aiofiles.os import remove as aioremove - -from bot import ( - LOGGER, - MAX_SPLIT_SIZE, - GLOBAL_EXTENSION_FILTER, - aria2, - user_data, - config_dict, - xnox_client, -) -from bot.modules.mediainfo import parseinfo -from bot.helper.aeon_utils.metadata import change_metadata -from bot.helper.ext_utils.bot_utils import ( - is_mkv, - cmd_exec, - sync_to_async, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.telegraph_helper import telegraph - -from .exceptions import ExtractionArchiveError +from aiofiles.os import rmdir, remove, listdir + +from bot import LOGGER, DOWNLOAD_DIR, aria2, xnox_client +from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async + +from .exceptions import NotSupportedExtractionArchive -FIRST_SPLIT_REGEX = r"(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$" -SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$" ARCH_EXT = [ ".tar.bz2", ".tar.gz", @@ -87,485 +58,9 @@ ".xar", ] +FIRST_SPLIT_REGEX = r"(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$" -async def is_multi_streams(path): - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Video Streams: {res}") - except Exception as e: - LOGGER.error(f"Get Video Streams: {e}. Mostly File not found!") - return False - fields = eval(result[0]).get("streams") - if fields is None: - LOGGER.error(f"get_video_streams: {result}") - return False - videos = 0 - audios = 0 - for stream in fields: - if stream.get("codec_type") == "video": - videos += 1 - elif stream.get("codec_type") == "audio": - audios += 1 - return videos > 1 or audios > 1 - - -async def get_media_info(path, metadata=False): - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_format", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Media Info: {res}") - except Exception as e: - LOGGER.error(f"Media Info: {e}. 
Mostly File not found!") - return (0, "", "", "") if metadata else (0, None, None) - ffresult = eval(result[0]) - fields = ffresult.get("format") - if fields is None: - LOGGER.error(f"Media Info Sections: {result}") - return (0, "", "", "") if metadata else (0, None, None) - duration = round(float(fields.get("duration", 0))) - if metadata: - lang, qual, stitles = "", "", "" - if (streams := ffresult.get("streams")) and streams[0].get( - "codec_type" - ) == "video": - qual = int(streams[0].get("height")) - qual = f"{480 if qual <= 480 else 540 if qual <= 540 else 720 if qual <= 720 else 1080 if qual <= 1080 else 2160 if qual <= 2160 else 4320 if qual <= 4320 else 8640}p" - for stream in streams: - if stream.get("codec_type") == "audio" and ( - lc := stream.get("tags", {}).get("language") - ): - try: - lc = Language.get(lc).display_name() - if lc not in lang: - lang += f"{lc}, " - except Exception: - pass - if stream.get("codec_type") == "subtitle" and ( - st := stream.get("tags", {}).get("language") - ): - try: - st = Language.get(st).display_name() - if st not in stitles: - stitles += f"{st}, " - except Exception: - pass - - return duration, qual, lang[:-2], stitles[:-2] - tags = fields.get("tags", {}) - artist = tags.get("artist") or tags.get("ARTIST") or tags.get("Artist") - title = tags.get("title") or tags.get("TITLE") or tags.get("Title") - return duration, artist, title - - -async def get_document_type(path): - is_video, is_audio, is_image = False, False, False - if path.endswith(tuple(ARCH_EXT)) or re_search( - r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", path - ): - return is_video, is_audio, is_image - mime_type = await sync_to_async(get_mime_type, path) - if mime_type.startswith("audio"): - return False, True, False - if mime_type.startswith("image"): - return False, False, True - if not mime_type.startswith("video") and not mime_type.endswith("octet-stream"): - return is_video, is_audio, is_image - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Document Type: {res}") - except Exception as e: - LOGGER.error(f"Get Document Type: {e}. Mostly File not found!") - return is_video, is_audio, is_image - fields = eval(result[0]).get("streams") - if fields is None: - LOGGER.error(f"get_document_type: {result}") - return is_video, is_audio, is_image - for stream in fields: - if stream.get("codec_type") == "video": - is_video = True - elif stream.get("codec_type") == "audio": - is_audio = True - return is_video, is_audio, is_image - - -async def get_audio_thumb(audio_file): - des_dir = "Thumbnails" - if not await aiopath.exists(des_dir): - await mkdir(des_dir) - des_dir = ospath.join(des_dir, f"{time()}.jpg") - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-i", - audio_file, - "-an", - "-vcodec", - "copy", - des_dir, - ] - status = await create_subprocess_exec(*cmd, stderr=PIPE) - if await status.wait() != 0 or not await aiopath.exists(des_dir): - err = (await status.stderr.read()).decode().strip() - LOGGER.error( - f"Error while extracting thumbnail from audio. 
Name: {audio_file} stderr: {err}" - ) - return None - return des_dir - - -async def take_ss(video_file, duration=None, total=1, gen_ss=False): - des_dir = ospath.join("Thumbnails", f"{time()}") - await makedirs(des_dir, exist_ok=True) - if duration is None: - duration = (await get_media_info(video_file))[0] - if duration == 0: - duration = 3 - duration = duration - (duration * 2 / 100) - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-ss", - "", - "-i", - video_file, - "-vf", - "thumbnail", - "-frames:v", - "1", - des_dir, - ] - tasks = [] - tstamps = {} - for eq_thumb in range(1, total + 1): - cmd[5] = str((duration // total) * eq_thumb) - tstamps[f"aeon_{eq_thumb}.jpg"] = strftime("%H:%M:%S", gmtime(float(cmd[5]))) - cmd[-1] = ospath.join(des_dir, f"aeon_{eq_thumb}.jpg") - tasks.append(create_task(create_subprocess_exec(*cmd, stderr=PIPE))) - status = await gather(*tasks) - for task, eq_thumb in zip(status, range(1, total + 1)): - if await task.wait() != 0 or not await aiopath.exists( - ospath.join(des_dir, f"aeon_{eq_thumb}.jpg") - ): - err = (await task.stderr.read()).decode().strip() - LOGGER.error( - f"Error while extracting thumbnail no. {eq_thumb} from video. Name: {video_file} stderr: {err}" - ) - await aiormtree(des_dir) - return None - return (des_dir, tstamps) if gen_ss else ospath.join(des_dir, "aeon_1.jpg") - - -async def split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time=0, - i=1, - multi_streams=True, -): - if ( - listener.suproc == "cancelled" - or listener.suproc is not None - and listener.suproc.returncode == -9 - ): - return False - if listener.seed and not listener.newDir: - dirpath = f"{dirpath}/splited_files" - if not await aiopath.exists(dirpath): - await mkdir(dirpath) - leech_split_size = MAX_SPLIT_SIZE - parts = -(-size // leech_split_size) - if (await get_document_type(path))[0]: - if multi_streams: - multi_streams = await is_multi_streams(path) - duration = (await get_media_info(path))[0] - base_name, extension = ospath.splitext(file_) - split_size -= 5000000 - while i <= parts or start_time < duration - 4: - parted_name = f"{base_name}.part{i:03}{extension}" - out_path = ospath.join(dirpath, parted_name) - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-ss", - str(start_time), - "-i", - path, - "-fs", - str(split_size), - "-map", - "0", - "-map_chapters", - "-1", - "-async", - "1", - "-strict", - "-2", - "-c", - "copy", - out_path, - ] - if not multi_streams: - del cmd[10] - del cmd[10] - if ( - listener.suproc == "cancelled" - or listener.suproc is not None - and listener.suproc.returncode == -9 - ): - return False - listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE) - code = await listener.suproc.wait() - if code == -9: - return False - if code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - with contextlib.suppress(Exception): - await aioremove(out_path) - if multi_streams: - LOGGER.warning( - f"{err}. Retrying without map, -map 0 not working in all situations. Path: {path}" - ) - return await split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time, - i, - False, - ) - LOGGER.warning( - f"{err}. Unable to split this video, if it's size less than {MAX_SPLIT_SIZE} will be uploaded as it is. 
Path: {path}" - ) - return "errored" - out_size = await aiopath.getsize(out_path) - if out_size > MAX_SPLIT_SIZE: - dif = out_size - MAX_SPLIT_SIZE - split_size -= dif + 5000000 - await aioremove(out_path) - return await split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time, - i, - ) - lpd = (await get_media_info(out_path))[0] - if lpd == 0: - LOGGER.error( - f"Something went wrong while splitting, mostly file is corrupted. Path: {path}" - ) - break - if duration == lpd: - LOGGER.warning( - f"This file has been splitted with default stream and audio, so you will only see one part with less size from orginal one because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {path}" - ) - break - if lpd <= 3: - await aioremove(out_path) - break - start_time += lpd - 3 - i += 1 - else: - out_path = ospath.join(dirpath, f"{file_}.") - listener.suproc = await create_subprocess_exec( - "split", - "--numeric-suffixes=1", - "--suffix-length=3", - f"--bytes={split_size}", - path, - out_path, - stderr=PIPE, - ) - code = await listener.suproc.wait() - if code == -9: - return False - if code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - LOGGER.error(err) - return True - - -async def process_file(file_, user_id, dirpath=None, is_mirror=False): - user_dict = user_data.get(user_id, {}) - prefix = user_dict.get("prefix", "") - remname = user_dict.get("remname", "") - suffix = user_dict.get("suffix", "") - lcaption = user_dict.get("lcaption", "") - metadata_key = user_dict.get("metadata", "") or config_dict["METADATA_KEY"] - prefile_ = file_ - - if metadata_key and dirpath and is_mkv(file_): - file_ = await change_metadata(file_, dirpath, metadata_key) - - file_ = re_sub(r"^www\S+\s*[-_]*\s*", "", file_) - if remname: - if not remname.startswith("|"): - remname = f"|{remname}" - remname = remname.replace(r"\s", " ") - slit = remname.split("|") - __new_file_name = ospath.splitext(file_)[0] - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - __new_file_name = re_sub( - args[0], args[1], __new_file_name, int(args[2]) - ) - elif len(args) == 2: - __new_file_name = re_sub(args[0], args[1], __new_file_name) - elif len(args) == 1: - __new_file_name = re_sub(args[0], "", __new_file_name) - file_ = __new_file_name + ospath.splitext(file_)[1] - LOGGER.info(f"New Filename : {file_}") - - nfile_ = file_ - if prefix: - nfile_ = prefix.replace(r"\s", " ") + file_ - prefix = re_sub(r"<.*?>", "", prefix).replace(r"\s", " ") - if not file_.startswith(prefix): - file_ = f"{prefix}{file_}" - - if suffix and not is_mirror: - suffix = suffix.replace(r"\s", " ") - suf_len = len(suffix) - file_dict = file_.split(".") - _ext_in = 1 + len(file_dict[-1]) - _ext_out_name = ".".join(file_dict[:-1]).replace(".", " ").replace("-", " ") - _new_ext_file_name = f"{_ext_out_name}{suffix}.{file_dict[-1]}" - if len(_ext_out_name) > (64 - (suf_len + _ext_in)): - _new_ext_file_name = ( - _ext_out_name[: 64 - (suf_len + _ext_in)] - + f"{suffix}.{file_dict[-1]}" - ) - file_ = _new_ext_file_name - elif suffix: - suffix = suffix.replace(r"\s", " ") - file_ = ( - f"{ospath.splitext(file_)[0]}{suffix}{ospath.splitext(file_)[1]}" - if "." 
in file_ - else f"{file_}{suffix}" - ) - - cap_mono = nfile_ - if lcaption and dirpath and not is_mirror: - - def lower_vars(match): - return f"{{{match.group(1).lower()}}}" - - lcaption = ( - lcaption.replace(r"\|", "%%") - .replace(r"\{", "&%&") - .replace(r"\}", "$%$") - .replace(r"\s", " ") - ) - slit = lcaption.split("|") - slit[0] = re_sub(r"\{([^}]+)\}", lower_vars, slit[0]) - up_path = ospath.join(dirpath, prefile_) - dur, qual, lang, subs = await get_media_info(up_path, True) - cap_mono = slit[0].format( - filename=nfile_, - size=get_readable_file_size(await aiopath.getsize(up_path)), - duration=get_readable_time(dur, True), - quality=qual, - languages=lang, - subtitles=subs, - md5_hash=get_md5_hash(up_path), - ) - if len(slit) > 1: - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - cap_mono = cap_mono.replace(args[0], args[1], int(args[2])) - elif len(args) == 2: - cap_mono = cap_mono.replace(args[0], args[1]) - elif len(args) == 1: - cap_mono = cap_mono.replace(args[0], "") - cap_mono = ( - cap_mono.replace("%%", "|").replace("&%&", "{").replace("$%$", "}") - ) - return file_, cap_mono - - -async def get_ss(up_path, ss_no): - thumbs_path, tstamps = await take_ss(up_path, total=ss_no, gen_ss=True) - th_html = f"{ospath.basename(up_path)}
Total Screenshots: {ss_no}
" - th_html += "".join( - f'Screenshot at {tstamps[thumb]}' - for thumb in natsorted(await listdir(thumbs_path)) - ) - await aiormtree(thumbs_path) - link_id = (await telegraph.create_page(title="ScreenShots", content=th_html))[ - "path" - ] - return f"https://graph.org/{link_id}" - - -async def get_mediainfo_link(up_path): - stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"')) - tc = f"{ospath.basename(up_path)}
" - if len(stdout) != 0: - tc += parseinfo(stdout) - link_id = (await telegraph.create_page(title="MediaInfo", content=tc))["path"] - return f"https://graph.org/{link_id}" - - -def get_md5_hash(up_path): - md5_hash = md5() - with open(up_path, "rb") as f: - for byte_block in iter(lambda: f.read(4096), b""): - md5_hash.update(byte_block) - return md5_hash.hexdigest() +SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$" def is_first_archive_split(file): @@ -583,60 +78,58 @@ def is_archive_split(file): async def clean_target(path): if await aiopath.exists(path): LOGGER.info(f"Cleaning Target: {path}") - if await aiopath.isdir(path): - with contextlib.suppress(Exception): - await aiormtree(path) - elif await aiopath.isfile(path): - with contextlib.suppress(Exception): - await aioremove(path) + try: + if await aiopath.isdir(path): + await aiormtree(path, ignore_errors=True) + else: + await remove(path) + except Exception as e: + LOGGER.error(str(e)) async def clean_download(path): if await aiopath.exists(path): - LOGGER.info(f"Cleaning Download: {path}") - with contextlib.suppress(Exception): - await aiormtree(path) - - -async def start_cleanup(): - xnox_client.torrents_delete(torrent_hashes="all") - with contextlib.suppress(Exception): - await aiormtree("/usr/src/app/downloads/") - await makedirs("/usr/src/app/downloads/", exist_ok=True) + try: + await aiormtree(path, ignore_errors=True) + except Exception as e: + LOGGER.error(str(e)) def clean_all(): aria2.remove_all(True) xnox_client.torrents_delete(torrent_hashes="all") - with contextlib.suppress(Exception): - rmtree("/usr/src/app/downloads/") + with suppress(Exception): + rmtree(DOWNLOAD_DIR, ignore_errors=True) + makedirs(DOWNLOAD_DIR, exist_ok=True) -def exit_clean_up(_, __): +def exit_clean_up(signal, frame): try: LOGGER.info("Please wait, while we clean up and stop the running downloads") clean_all() - srun( - ["pkill", "-9", "-f", "-e", "gunicorn|xria|xnox|xtra|xone"], check=False - ) + run(["pkill", "-9", "-f", "gunicorn|xria|xnox|xtra|xone"], check=False) sexit(0) except KeyboardInterrupt: LOGGER.warning("Force Exiting before the cleanup finishes!") sexit(1) -async def clean_unwanted(path): +async def clean_unwanted(path, custom_list=None): + if custom_list is None: + custom_list = [] LOGGER.info(f"Cleaning unwanted files/folders: {path}") for dirpath, _, files in await sync_to_async(walk, path, topdown=False): for filee in files: + f_path = ospath.join(dirpath, filee) if ( filee.endswith(".!qB") + or f_path in custom_list or filee.endswith(".parts") and filee.startswith(".") ): - await aioremove(ospath.join(dirpath, filee)) - if dirpath.endswith((".unwanted", "splited_files", "copied")): - await aiormtree(dirpath) + await remove(f_path) + if dirpath.endswith((".unwanted", "splited_files_joya", "copied_joya")): + await aiormtree(dirpath, ignore_errors=True) for dirpath, _, files in await sync_to_async(walk, path, topdown=False): if not await listdir(dirpath): await rmdir(dirpath) @@ -646,21 +139,27 @@ async def get_path_size(path): if await aiopath.isfile(path): return await aiopath.getsize(path) total_size = 0 - for root, dirs, files in await sync_to_async(walk, path): + for root, _, files in await sync_to_async(walk, path): for f in files: abs_path = ospath.join(root, f) total_size += await aiopath.getsize(abs_path) return total_size -async def count_files_and_folders(path): +async def count_files_and_folders(path, extension_filter, unwanted_files=None): + if unwanted_files is None: + unwanted_files = [] total_files = 0 
total_folders = 0 - for _, dirs, files in await sync_to_async(walk, path): + for dirpath, dirs, files in await sync_to_async(walk, path): total_files += len(files) for f in files: - if f.endswith(tuple(GLOBAL_EXTENSION_FILTER)): + if f.endswith(tuple(extension_filter)): total_files -= 1 + elif unwanted_files: + f_path = ospath.join(dirpath, f) + if f_path in unwanted_files: + total_files -= 1 total_folders += len(dirs) return total_folders, total_files @@ -671,7 +170,7 @@ def get_base_name(orig_path): ) if extension != "": return re_split(f"{extension}$", orig_path, maxsplit=1, flags=IGNORECASE)[0] - raise ExtractionArchiveError("File format not supported for extraction") + raise NotSupportedExtractionArchive("File format not supported for extraction") def get_mime_type(file_path): @@ -680,42 +179,31 @@ def get_mime_type(file_path): return mime_type or "text/plain" -def check_storage_threshold(size, threshold, arch=False, alloc=False): - free = disk_usage("/usr/src/app/downloads/").free - if not alloc: - if ( - not arch - and free - size < threshold - or arch - and free - (size * 2) < threshold - ): - return False - elif not arch: - if free < threshold: - return False - elif free - size < threshold: - return False - return True - - async def join_files(path): files = await listdir(path) results = [] + exists = False for file_ in files: - if ( - re_search(r"\.0+2$", file_) - and await sync_to_async(get_mime_type, f"{path}/{file_}") - == "application/octet-stream" - ): + if re_search(r"\.0+2$", file_) and await sync_to_async( + get_mime_type, f"{path}/{file_}" + ) not in ["application/x-7z-compressed", "application/zip"]: + exists = True final_name = file_.rsplit(".", 1)[0] - cmd = f"cat {path}/{final_name}.* > {path}/{final_name}" + fpath = f"{path}/{final_name}" + cmd = f'cat "{fpath}."* > "{fpath}"' _, stderr, code = await cmd_exec(cmd, True) if code != 0: LOGGER.error(f"Failed to join {final_name}, stderr: {stderr}") + if await aiopath.isfile(fpath): + await remove(fpath) else: results.append(final_name) - if results: + + if not exists: + LOGGER.warning("No files to join!") + elif results: + LOGGER.info("Join Completed!") for res in results: for file_ in files: - if re_search(rf"{res}\.0[0-9]+$", file_): - await aioremove(f"{path}/{file_}") + if re_search(rf"{escape(res)}\.0[0-9]+$", file_): + await remove(f"{path}/{file_}") diff --git a/bot/helper/ext_utils/help_messages.py b/bot/helper/ext_utils/help_messages.py new file mode 100644 index 000000000..18e7c184f --- /dev/null +++ b/bot/helper/ext_utils/help_messages.py @@ -0,0 +1,265 @@ +nsfw_keywords = [ + "porn", + "onlyfans", + "nsfw", + "Brazzers", + "adult", + "xnxx", + "xvideos", + "nsfwcherry", + "hardcore", + "Pornhub", + "xvideos2", + "youporn", + "pornrip", + "playboy", + "hentai", + "erotica", + "blowjob", + "redtube", + "stripchat", + "camgirl", + "nude", + "fetish", + "cuckold", + "orgy", + "horny", + "swingers", +] + + +mirror = """Send link along with command line or + +/cmd link option + +By replying to link/file: + +/cmd option""" + +yt = """Send link along with command line: + +/cmd link +By replying to link: +/cmd -n new name -z password -opt x:y|x1:y1 + +Check here all supported SITES +Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options.""" + +clone = """Send Gdrive|Gdot|Filepress|Filebee|Appdrive|Gdflix link or rclone path along with command or by replying to the link/rc_path by command. +Use -sync to use sync method in rclone. 
Example: /cmd rcl/rclone_path -up rcl/rclone_path/rc -sync""" + +new_name = """New Name: -n + +/cmd link -n new name +Note: Doesn't work with torrents""" + +multi_link = """Multi links only by replying to first link/file: -i + +/cmd -i 10(number of links/files)""" + +same_dir = """Multi links within same upload directory only by replying to first link/file: -m + +/cmd -i 10(number of links/files) -m folder name (multi message) +/cmd -b -m folder name (bulk-message/file)""" + +thumb = """Thumbnail for current task: -t + +/cmd link -t tg-message-link(doc or photo)""" + +upload = """Upload Destination: -up + +/cmd link -up rcl/gdl (To select rclone config/token.pickle, remote & path/ gdrive id) +You can directly add the upload path: -up remote:dir/subdir or -up (Gdrive_id) +If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID. +If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH. + +If you want to add path or gdrive manually from your config/token (uploaded from usetting) add mrcc: for rclone and mtp: before the path/gdrive_id without space. +/cmd link -up mrcc:main:dump or -up mtp:gdrive_id + +Incase you want to specify whether using token.pickle or service accounts you can add tp:gdrive_id or sa:gdrive_id or mtp:gdrive_id. +DEFAULT_UPLOAD doesn't effect on leech cmds. +""" + +user_download = """User Download: link + +/cmd tp:link to download using owner token.pickle incase service account enabled. +/cmd sa:link to download using service account incase service account disabled. +/cmd tp:gdrive_id to download using token.pickle and file_id incase service account enabled. +/cmd sa:gdrive_id to download using service account and file_id incase service account disabled. +/cmd mtp:gdrive_id or mtp:link to download using user token.pickle uploaded from usetting +/cmd mrcc:remote:path to download using user rclone config uploaded from usetting""" + +rcf = """Rclone Flags: -rcf + +/cmd link|path|rcl -up path|rcl -rcf --buffer-size:8M|--drive-starred-only|key|key:value +This will override all other flags except --exclude +Check here all RcloneFlags.""" + +bulk = """Bulk Download: -b + +Bulk can be used by text message and by replying to text file contains links separated by new line. +You can use it only by reply to message(text/file). +Example: +link1 -n new name -up remote1:path1 -rcf |key:value|key:value +link2 -z -n new name -up remote2:path2 +link3 -e -n new name -up remote2:path2 +Reply to this example by this cmd -> /cmd -b(bulk) or /cmd -b -m folder name +You can set start and end of the links from the bulk like seed, with -b start:end or only end by -b :end or only start by -b start. 
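For illustration, a `-b start:end` range spec could be turned into the two integers the bulk extractor expects with a parser like this (hypothetical helper, not part of this PR):

    def parse_bulk_range(spec):
        # "2:5" -> (2, 5), ":5" -> (0, 5), "2" or "2:" -> (2, 0);
        # 0 is the sentinel for "no bound" on that side.
        if ":" in spec:
            start, _, end = spec.partition(":")
            return int(start or 0), int(end or 0)
        return int(spec or 0), 0

    assert parse_bulk_range("2:5") == (2, 5)
    assert parse_bulk_range(":5") == (0, 5)
    assert parse_bulk_range("2") == (2, 0)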
+The default start is from zero(first link) to inf.""" + +rlone_dl = """Rclone Download: + +Treat rclone paths exactly like links +/cmd main:dump/ubuntu.iso or rcl(To select config, remote and path) +Users can add their own rclone from user settings +If you want to add path manually from your config add mrcc: before the path without space +/cmd mrcc:main:dump/ubuntu.iso""" + +extract_zip = """Extract/Zip: -e -z + +/cmd link -e password (extract password protected) +/cmd link -z password (zip password protected) +/cmd link -z password -e (extract and zip password protected) +Note: When both extract and zip added with cmd it will extract first and then zip, so always extract first""" + +join = """Join Splitted Files: -j + +This option will only work before extract and zip, so mostly it will be used with -m argument (same_dir) +By Reply: +/cmd -i 3 -j -m folder name +/cmd -b -j -m folder name +if u have link(folder) have splitted files: +/cmd link -j""" + +tg_links = """TG Links: + +Treat links like any direct link +Some links need user access so sure you must add USER_SESSION_STRING for it. +Three types of links: +Public: https://t.me/channel_name/message_id +Private: tg://openmessage?user_id=xxxxxx&message_id=xxxxx +Super: https://t.me/c/channel_id/message_id +Range: https://t.me/channel_name/first_message_id-last_message_id +Range Example: tg://openmessage?user_id=xxxxxx&message_id=555-560 or https://t.me/channel_name/100-150 +Note: Range link will work only by replying cmd to it""" + +sample_video = """Sample Video: -sv + +Create sample video for one video or folder of videos. +/cmd -sv (it will take the default values which 60sec sample duration and part duration is 4sec). +You can control those values. Example: /cmd -sv 70:5(sample-duration:part-duration) or /cmd -sv :5 or /cmd -sv 70.""" + +screenshot = """ScreenShots: -ss + +Create up to 10 screenshots for one video or folder of videos. +/cmd -ss (it will take the default values which is 10 photos). +You can control this value. Example: /cmd -ss 6.""" + +seed = """Bittorrent seed: -d + +/cmd link -d ratio:seed_time or by replying to file/link +To specify ratio and seed time add -d ratio:time. +Example: -d 0.7:10 (ratio and time) or -d 0.7 (only ratio) or -d :10 (only time) where time in minutes""" + +zip_arg = """Zip: -z password + +/cmd link -z (zip) +/cmd link -z password (zip password protected)""" + +qual = """Quality Buttons: -s + +In case default quality added from yt-dlp options using format option and you need to select quality for specific link or links with multi links feature. +/cmd link -s""" + +yt_opt = """Options: -opt + +/cmd link -opt playliststart:^10|fragment_retries:^inf|matchtitle:S13|writesubtitles:true|live_from_start:true|postprocessor_args:{"xtra": ["-threads", "4"]}|wait_for_video:(5, 100) +Note: Add `^` before integer or float, some values must be numeric and some string. +Like playlist_items:10 works with string, so no need to add `^` before the number but playlistend works only with integer so you must add `^` before the number like example above. +You can add tuple and dict also. 
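As a rough illustration of this option-string format, a parser might look like the sketch below. This is a simplified, hypothetical version: the bot's actual parser also has to handle the tuple and dict values mentioned above, which is not shown here.

    def parse_ytdlp_opts(opt_string):
        # "playliststart:^10|matchtitle:S13|writesubtitles:true"
        # '^' marks a numeric value; 'true'/'false' become booleans.
        options = {}
        for pair in opt_string.split("|"):
            key, _, value = pair.partition(":")
            if value.startswith("^"):
                num = float(value[1:])
                value = int(num) if num.is_integer() else num
            elif value.lower() == "true":
                value = True
            elif value.lower() == "false":
                value = False
            options[key] = value
        return options

    assert parse_ytdlp_opts("playliststart:^10|matchtitle:S13") == {
        "playliststart": 10, "matchtitle": "S13"}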
Use double quotes inside dict.""" + +convert_media = """Convert Media: -ca -cv +/cmd link -ca mp3 -cv mp4 (convert all audios to mp3 and all videos to mp4) +/cmd link -ca mp3 (convert all audios to mp3) +/cmd link -cv mp4 (convert all videos to mp4) +/cmd link -ca mp3 + flac ogg (convert only flac and ogg audios to mp3) +/cmd link -cv mkv - webm flv (convert all videos to mp4 except webm and flv)""" + +gdrive = """Gdrive: link +If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID. +/cmd gdriveLink or gdl or gdriveId -up gdl or gdriveId or gd +/cmd tp:gdriveLink or tp:gdriveId -up tp:gdriveId or gdl or gd (to use token.pickle if service account enabled) +/cmd sa:gdriveLink or sa:gdriveId -p sa:gdriveId or gdl or gd (to use service account if service account disabled) +/cmd mtp:gdriveLink or mtp:gdriveId -up mtp:gdriveId or gdl or gd(if you have added upload gdriveId from usetting) (to use user token.pickle that uploaded by usetting)""" + +rclone_cl = """Rclone: path +If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH. +/cmd rcl/rclone_path -up rcl/rclone_path/rc -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue +/cmd rcl or rclonePath -up rclonePath or rc or rcl +/cmd mrcc:rclonePath -up rcl or rc(if you have add rclone path from usetting) (to use user config)""" + +name_sub = """Name Substitution: -ns +/cmd link -ns tea : coffee : s|ACC : : s|mP4 +This will affect on all files. Format: wordToReplace : wordToReplaceWith : sensitiveCase +1. tea will get replaced by coffee with sensitive case because I have added `s` last of the option. +2. ACC will get removed because I have added nothing between to replace with sensitive case because I have added `s` last of the option. +3. mP4 will get removed because I have added nothing to replace with +""" + +YT_HELP_DICT = { + "main": yt, + "New-Name": f"{new_name}\nNote: Don't add file extension", + "Zip": zip_arg, + "Quality": qual, + "Options": yt_opt, + "Multi-Link": multi_link, + "Same-Directory": same_dir, + "Thumb": thumb, + "Upload-Destination": upload, + "Rclone-Flags": rcf, + "Bulk": bulk, + "Sample-Video": sample_video, + "Screenshot": screenshot, + "Convert-Media": convert_media, + "Name-Substitute": name_sub, +} + +MIRROR_HELP_DICT = { + "main": mirror, + "New-Name": new_name, + "DL-Auth": "Direct link authorization: -au -ap\n\n/cmd link -au username -ap password", + "Headers": "Direct link custom headers: -h\n\n/cmd link -h key: value key1: value1", + "Extract/Zip": extract_zip, + "Select-Files": "Bittorrent File Selection: -s\n\n/cmd link -s or by replying to file/link", + "Torrent-Seed": seed, + "Multi-Link": multi_link, + "Same-Directory": same_dir, + "Thumb": thumb, + "Upload-Destination": upload, + "Rclone-Flags": rcf, + "Bulk": bulk, + "Join": join, + "Rclone-DL": rlone_dl, + "Tg-Links": tg_links, + "Sample-Video": sample_video, + "Screenshot": screenshot, + "Convert-Media": convert_media, + "User-Download": user_download, + "Name-Substitute": name_sub, +} + +CLONE_HELP_DICT = { + "main": clone, + "Multi-Link": multi_link, + "Bulk": bulk, + "Gdrive": gdrive, + "Rclone": rclone_cl, +} + + +PASSWORD_ERROR_MESSAGE = """ +This link requires a password! +- Insert :: after the link and write the password after the sign. 
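For illustration, splitting such a `link::password` suffix can be as simple as the following (hypothetical helper, not part of this PR):

    def split_link_password(link):
        # "https://host/file::my password" -> ("https://host/file", "my password")
        # partition() keeps everything after the first "::", so the password
        # itself may contain spaces (but no "::").
        url, sep, password = link.partition("::")
        return (url, password) if sep else (link, None)

    assert split_link_password("https://x.y/f::my password") == ("https://x.y/f", "my password")
    assert split_link_password("https://x.y/f") == ("https://x.y/f", None)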
+ +Example: link::my password +""" diff --git a/bot/helper/ext_utils/help_strings.py b/bot/helper/ext_utils/help_strings.py deleted file mode 100644 index 6522d08fc..000000000 --- a/bot/helper/ext_utils/help_strings.py +++ /dev/null @@ -1,171 +0,0 @@ -from bot import GROUPS_EMAIL - -YT_HELP_MESSAGE = """ -To use the commands, follow this format: -/{cmd} link options
or replying to link -/{cmd} options
- -OPTIONS: --s: Select quality for specific link or links. --z password: Create a password-protected zip file. --n new_name: Rename the file. --t thumbnail url: Custom thumbnail for each leech (raw or tg image url). --ss value: Generate ss for leech video, max 10 for each leech. --id drive_folder_link or drive_id -index https://anything.in/0: Upload to a custom drive. --opt playliststart:^10|fragment_retries:^inf|matchtitle:S13|writesubtitles:true|live_from_start:true|postprocessor_args:{{"ffmpeg": ["-threads", "4"]}}|wait_for_video:(5, 100): Set additional options. --i 10: Process multiple links. --b: Perform bulk download by replying to a text message or file with links separated with new line. - - -Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options. -""" - -MIRROR_HELP_MESSAGE = """ -To use the commands, follow this format: -/{cmd} link options
or replying to link -/{cmd} options
- -OPTIONS: --n new name: Rename the file or folder. --atc attachment url: Custom attachment for each mkv (raw only). --t thumbnail url: Custom thumbnail for each leech (raw or tg image url). --ss value: Generate ss for leech video, max 10 for each leech. --z or -z password: Zip the file or folder with or without password. --e or -e password: Extract the file or folder with or without password. --up upload destination: Upload the file or folder to a specific destination. --id drive_folder_link or -id drive_id -index https://anything.in/0:: Upload to a custom Google Drive folder or ID. --u username -p password: Provide authorization for a direct link. --s: Select a torrent file. --h Direct link custom headers: -h-""" - -CLONE_HELP_MESSAGE = """ -Send Gdrive link or rclone path along with command or by replying to the link/rc_path by command. - -Multi links only by replying to first gdlink or rclone_path: -/cmd
link -h Key: value Key1: value1. --d ratio:seed_time: Set the seeding ratio and time for a torrent. --i number of links/files: Process multiple links or files. --m folder name: Process multiple links or files within the same upload directory. --b: Perform bulk download by replying to a text message or file with multiple links separated with new line. --j: Join split files together before extracting or zipping. --rcf: Set Rclone flags for the command. -main:dump/ubuntu.iso or rcl: Treat a path as an rclone download. -/{cmd}
-i 10 (number of links/paths) - -Gdrive: -/{cmd}
gdrivelink - -Upload Custom Drive: link -id -index --id drive_folder_link or drive_id -index https://anything.in/0:
-drive_id must be a folder ID, and index must be a URL, otherwise it will not accept. - -Rclone: -/{cmd}
(rcl or rclone_path) -up (rcl or rclone_path) -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue - -Note: If -up is not specified, the rclone destination will be the RCLONE_PATH from config.env. -""" - -PASSWORD_ERROR_MESSAGE = """ -This link requires a password! -- Insert sign :: after the link and write the password after the sign. -Example: {}::love you -Note: No spaces between the signs :: -For the password, you can use a space! -""" - - -bset_display_dict = { - "AS_DOCUMENT": "Default type of Telegram file upload. Default is False, meaning as media.", - "BASE_URL": "Valid BASE URL where the bot is deployed to use torrent web files selection. Collect it from Heroku.", - "LEECH_LIMIT": "To limit the Torrent/Direct/ytdlp leech size. The default unit is GB. Int", - "CLONE_LIMIT": "To limit the size of Google Drive folder/file which you can clone. The default unit is GB. Int", - "MEGA_LIMIT": "To limit the size of Mega download. The default unit is GB. Int", - "TORRENT_LIMIT": "To limit the size of torrent download. The default unit is GB. Int", - "DIRECT_LIMIT": "To limit the size of direct link download. The default unit is GB. Int", - "YTDLP_LIMIT": "To limit the size of ytdlp download. The default unit is GB. Int", - "PLAYLIST_LIMIT": "To limit the maximum number of playlists. Int", - "IMAGES": "Add multiple Telegraph (graph.org) image links, separated by spaces.", - "USER_MAX_TASKS": "Limit the maximum tasks for users of a group at a time. Use an integer.", - "GDRIVE_LIMIT": "To limit the size of Google Drive folder/file link for leech, zip, and unzip. The default unit is GB. Int", - "USER_TASKS_LIMIT": "The maximum limit on tasks for each user. Int", - "FSUB_IDS": "Fill in the chat_id (-100xxxxxx) of groups/channels you want to force subscribe. Separate them by space. Int\n\nNote: Bot should be added in the filled chat_id as admin.", - "BOT_TOKEN": "The Telegram Bot Token that you got from @BotFather.", - "CMD_SUFFIX": "Commands index number. This number will be added at the end of all commands.", - "DATABASE_URL": "Your Mongo Database URL (Connection string). Follow this Generate Database to generate the database. Data will be saved in the database: auth and sudo users, user settings including thumbnails for each user.\n\nNOTE: You can always edit all settings saved in the database from the official site -> (Browse collections).", - "DEFAULT_UPLOAD": 'Whether "rc" to upload to RCLONE_PATH or "gd" to upload to GDRIVE_ID. Default is "gd".', - "LEECH_DUMP_ID": "Chat ID where leeched files would be uploaded. Int. NOTE: Only available for superGroup/channel. Add -100 before the channel/superGroup ID. In short, don't add bot ID or your ID!", - "MIRROR_LOG_ID": "Chat ID where mirror files would be sent. Int. NOTE: Only available for superGroup/channel. Add -100 before the channel/superGroup ID. In short, don't add bot ID or your ID! For multiple IDs, separate them by space.", - "EXTENSION_FILTER": "File extensions that won't be uploaded/cloned. Separate them by space.", - "GDRIVE_ID": "This is the Folder/TeamDrive ID of Google Drive or root to which you want to upload all the mirrors using google-api-python-client.", - "INDEX_URL": "Refer to https://gitlab.com/ParveenBhadooOfficial/Google-Drive-Index.", - "SHOW_MEDIAINFO": "Add a button to show MediaInfo in leeched files. Bool", - "TOKEN_TIMEOUT": "Token timeout for each group member in seconds. Int", - "MEDIA_GROUP": "View uploaded split file parts in media group. 
Default is False.", - "MEGA_EMAIL": "Email used to sign in on mega.nz for using a premium account. Str", - "MEGA_PASSWORD": "Password for mega.nz account. Str", - "OWNER_ID": "The Telegram User ID (not username) of the owner of the bot.", - "QUEUE_ALL": "Number of parallel tasks for downloads and uploads. For example, if 20 tasks are added and QUEUE_ALL is 8, then the sum of uploading and downloading tasks is 8 and the rest are in the queue. Int. NOTE: If you want to fill QUEUE_DOWNLOAD or QUEUE_UPLOAD, then the QUEUE_ALL value must be greater than or equal to the largest one and less than or equal to the sum of QUEUE_UPLOAD and QUEUE_DOWNLOAD.", - "QUEUE_DOWNLOAD": "Number of all parallel downloading tasks. Int", - "QUEUE_UPLOAD": "Number of all parallel uploading tasks. Int", - "RCLONE_FLAGS": "key:value|key|key|key:value. Check here all RcloneFlags.", - "RCLONE_PATH": "Default rclone path to which you want to upload all the mirrors using rclone.", - "SEARCH_API_LINK": "Search API app link. Get your API from deploying this repository. Supported sites: 1337x, Piratebay, Nyaasi, Torlock, Torrent Galaxy, Zooqle, Kickass, Bitsearch, MagnetDL, Libgen, YTS, Limetorrent, TorrentFunk, Glodls, TorrentProject, and YourBittorrent.", - "SEARCH_LIMIT": "Search limit for the search API, limit for each site and not overall result limit. Default is zero (default API limit for each site).", - "STOP_DUPLICATE": "Bot will check file/folder name in Drive in case of uploading to GDRIVE_ID. If it's present in Drive, then downloading or cloning will be stopped. (NOTE: Item will be checked using name and not hash, so this feature is not perfect yet). Default is False.", - "TELEGRAM_API": "This is to authenticate your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org.", - "TELEGRAM_HASH": "This is to authenticate your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org.", - "TORRENT_TIMEOUT": "Timeout for dead torrents downloading with qBittorrent and Aria2c in seconds. Int", - "UPSTREAM_REPO": "Your GitHub repository link. If your repo is private, add https://username:{githubtoken}@github.com/{username}/{reponame} format. Get the token from GitHub settings. So you can update your bot from the filled repository on each restart.", - "UPSTREAM_BRANCH": "Upstream branch for updates. Default is main.", - "SET_COMMANDS": "Set bot commands automatically. Bool", - "USE_SERVICE_ACCOUNTS": "Whether to use Service Accounts or not, with google-api-python-client. For this to work see Using Service Accounts section below. Default is False", - "USER_SESSION_STRING": "To download/upload from your Telegram account. To generate a session string, use this commandpython3 generate_string_session.py
after mounting the repo folder for sure.\n\nNOTE: You can't use the bot with private messages. Use it with superGroup.", - "YT_DLP_OPTIONS": 'Default yt-dlp options. Check all possible options HERE or use this script to convert CLI arguments to API options. Format: key:value|key:value|key:value. Add ^ before an integer or float, some numbers must be numeric and some strings. \nExample: "format:bv*+mergeall[vcodec=none]|nocheckcertificate:True".', -} - -uset_display_dict = { - "rcc": [ - "RClone is a command-line program to sync files and directories to and from different cloud storage providers like GDrive, OneDrive...", - "Send rclone.conf. Timeout: 60 sec", - ], - "prefix": [ - "Filename Prefix is the front part attached to the filename of the leech files.", - "Send filename prefix. Timeout: 60 sec", - ], - "suffix": [ - "Filename Suffix is the end part attached to the filename of the leech files.", - "Send filename suffix. Timeout: 60 sec", - ], - "remname": [ - "Filename Remname is a combination of regex patterns used for removing or manipulating the filename of the leech files.", - "Send filename remname. Timeout: 60 sec", - ], - "metadata": [ - "Metadata will change MKV video files including all audio, streams, and subtitle titles.", - "Send metadata title. Timeout: 60 sec", - ], - "attachment": [ - "Attachment url, it will added in mkv as thumbnail or cover photo, whetever you say.", - "Send raw photo url, example from imgbb.com . Timeout: 60 sec", - ], - "lcaption": [ - "Leech Caption is the custom caption on the leech files uploaded by the bot.", - "Send leech caption. You can add HTML tags. Timeout: 60 sec", - ], - "ldump": [ - "Leech Files User Dump for personal use as a storage.", - "Send leech dump channel ID. Timeout: 60 sec", - ], - "thumb": [ - "Custom thumbnail to appear on the leeched files uploaded by the bot.", - "Send a photo to save it as a custom thumbnail. Timeout: 60 sec", - ], - "yt_opt": [ - "YT-DLP Options are the custom quality settings for the extraction of videos from yt-dlp supported sites.", - 'Send YT-DLP options. Timeout: 60 sec\nFormat: key:value|key:value|key:value.\nExample: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True\nCheck all yt-dlp API options from this file or use this script to convert CLI arguments to API options.', - ], - "user_tds": [ - f'UserTD helps to upload files via the bot to your custom drive destination through global SA mail.\n\nSA Mail: {SA if (SA := GROUPS_EMAIL) else "Not Specified"}', - "Send User TD details for use while mirroring/cloning.\nFormat:\nname drive_id/link index (optional)\n\nNOTE:\n1. You must add our SA mail to your drive with write permission.\n2. Names can have spaces.\n3. 
Drive ID must be valid for acceptance.\n\nTimeout: 60 sec.", - ], -} diff --git a/bot/helper/ext_utils/links_utils.py b/bot/helper/ext_utils/links_utils.py new file mode 100644 index 000000000..17958c0ac --- /dev/null +++ b/bot/helper/ext_utils/links_utils.py @@ -0,0 +1,57 @@ +from re import match as re_match + + +def is_magnet(url: str): + return bool(re_match(r"magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*", url)) + + +def is_url(url: str): + return bool( + re_match( + r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$", + url, + ) + ) + + +def is_gdrive_link(url: str): + return "drive.google.com" in url or "drive.usercontent.google.com" in url + + +def is_telegram_link(url: str): + return url.startswith(("https://t.me/", "tg://openmessage?user_id=")) + + +def is_share_link(url: str): + return bool( + re_match( + r"https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+", + url, + ) + ) + + +def is_rclone_path(path: str): + return bool( + re_match( + r"^(mrcc:)?(?!(magnet:|mtp:|sa:|tp:))(?![- ])[a-zA-Z0-9_\. -]+(? 1 or audios > 1 + return False + + +async def get_media_info(path): + try: + result = await cmd_exec( + [ + "ffprobe", + "-hide_banner", + "-loglevel", + "error", + "-print_format", + "json", + "-show_format", + path, + ] + ) + except Exception as e: + LOGGER.error(f"Get Media Info: {e}. Mostly File not found! - File: {path}") + return 0, None, None + if result[0] and result[2] == 0: + fields = eval(result[0]).get("format") + if fields is None: + LOGGER.error(f"get_media_info: {result}") + return 0, None, None + duration = round(float(fields.get("duration", 0))) + tags = fields.get("tags", {}) + artist = tags.get("artist") or tags.get("ARTIST") or tags.get("Artist") + title = tags.get("title") or tags.get("TITLE") or tags.get("Title") + return duration, artist, title + return 0, None, None + + +async def get_document_type(path): + is_video, is_audio, is_image = False, False, False + if path.endswith(tuple(ARCH_EXT)) or re_search( + r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", path + ): + return is_video, is_audio, is_image + mime_type = await sync_to_async(get_mime_type, path) + if mime_type.startswith("image"): + return False, False, True + if mime_type.startswith("audio"): + return False, True, False + if not mime_type.startswith("video") and not mime_type.endswith("octet-stream"): + return is_video, is_audio, is_image + try: + result = await cmd_exec( + [ + "ffprobe", + "-hide_banner", + "-loglevel", + "error", + "-print_format", + "json", + "-show_streams", + path, + ] + ) + if result[1] and mime_type.startswith("video"): + is_video = True + except Exception as e: + LOGGER.error( + f"Get Document Type: {e}. Mostly File not found! 
- File: {path}" + ) + if mime_type.startswith("video"): + is_video = True + return is_video, is_audio, is_image + if result[0] and result[2] == 0: + fields = eval(result[0]).get("streams") + if fields is None: + LOGGER.error(f"get_document_type: {result}") + return is_video, is_audio, is_image + is_video = False + for stream in fields: + if stream.get("codec_type") == "video": + is_video = True + elif stream.get("codec_type") == "audio": + is_audio = True + return is_video, is_audio, is_image + + +def is_mkv(file): + return file.lower().endswith(".mkv") + + +async def take_ss(video_file, ss_nb) -> bool: + ss_nb = min(ss_nb, 10) + duration = (await get_media_info(video_file))[0] + if duration != 0: + dirpath, name = video_file.rsplit("/", 1) + name, _ = ospath.splitext(name) + dirpath = f"{dirpath}/{name}_joyass/" + await makedirs(dirpath, exist_ok=True) + interval = duration // (ss_nb + 1) + cap_time = interval + cmds = [] + for i in range(ss_nb): + output = f"{dirpath}SS.{name}_{i:02}.png" + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-ss", + f"{cap_time}", + "-i", + video_file, + "-q:v", + "1", + "-frames:v", + "1", + output, + ] + cap_time += interval + cmds.append(cmd_exec(cmd)) + try: + resutls = await wait_for(gather(*cmds), timeout=60) + if resutls[0][2] != 0: + LOGGER.error( + f"Error while creating sreenshots from video. Path: {video_file}. stderr: {resutls[0][1]}" + ) + await rmtree(dirpath, ignore_errors=True) + return False + except Exception: + LOGGER.error( + f"Error while creating sreenshots from video. Path: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!" + ) + await rmtree(dirpath, ignore_errors=True) + return False + return dirpath + LOGGER.error("take_ss: Can't get the duration of video") + await rmtree(dirpath, ignore_errors=True) + return False + + +async def get_audio_thumb(audio_file): + des_dir = "Thumbnails/" + await makedirs(des_dir, exist_ok=True) + des_dir = f"Thumbnails/{time()}.jpg" + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-i", + audio_file, + "-an", + "-vcodec", + "copy", + des_dir, + ] + _, err, code = await cmd_exec(cmd) + if code != 0 or not await aiopath.exists(des_dir): + LOGGER.error( + f"Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}" + ) + return None + return des_dir + + +async def create_thumbnail(video_file, duration): + des_dir = "Thumbnails" + await makedirs(des_dir, exist_ok=True) + des_dir = ospath.join(des_dir, f"{time()}.jpg") + if duration is None: + duration = (await get_media_info(video_file))[0] + if duration == 0: + duration = 3 + duration = duration // 2 + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-ss", + f"{duration}", + "-i", + video_file, + "-vf", + "thumbnail", + "-frames:v", + "1", + des_dir, + ] + try: + _, err, code = await wait_for(cmd_exec(cmd), timeout=60) + if code != 0 or not await aiopath.exists(des_dir): + LOGGER.error( + f"Error while extracting thumbnail from video. Name: {video_file} stderr: {err}" + ) + return None + except Exception: + LOGGER.error( + f"Error while extracting thumbnail from video. Name: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!" 
+ ) + return None + return des_dir + + +async def split_file( + path, + size, + dirpath, + file_, + split_size, + listener, + start_time=0, + i=1, + multi_streams=True, +): + if listener.seed and not listener.newDir: + dirpath = f"{dirpath}/splited_files_joya" + await makedirs(dirpath, exist_ok=True) + + parts = -(-size // listener.splitSize) + + if not listener.asDoc and (await get_document_type(path))[0]: + if multi_streams: + multi_streams = await is_multi_streams(path) + duration = (await get_media_info(path))[0] + base_name, extension = ospath.splitext(file_) + split_size -= 5000000 + + while i <= parts or start_time < duration - 4: + out_path = f"{dirpath}/{base_name}.part{i:03}{extension}" + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-ss", + str(start_time), + "-i", + path, + "-fs", + str(split_size), + "-map", + "0", + "-map_chapters", + "-1", + "-async", + "1", + "-strict", + "-2", + "-c", + "copy", + out_path, + ] + if not multi_streams: + del cmd[10:12] + + if listener.isCancelled: + return False + + async with subprocess_lock: + listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE) + _, stderr = await listener.suproc.communicate() + + if listener.isCancelled: + return False + + code = listener.suproc.returncode + if code == -9: + listener.isCancelled = True + return False + if code != 0: + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + + with suppress(Exception): + await remove(out_path) + + if multi_streams: + LOGGER.warning( + f"{stderr}. Retrying without map, -map 0 not working in all situations. Path: {path}" + ) + return await split_file( + path, + size, + dirpath, + file_, + split_size, + listener, + start_time, + i, + False, + ) + LOGGER.warning( + f"{stderr}. Unable to split this video, if its size is less than {listener.SplitSize} it will be uploaded as it is. Path: {path}" + ) + return False + + out_size = await aiopath.getsize(out_path) + if out_size > listener.splitSize: + dif = out_size - listener.splitSize + split_size -= dif + 5000000 + await remove(out_path) + return await split_file( + path, + size, + dirpath, + file_, + split_size, + listener, + start_time, + i, + multi_streams, + ) + + lpd = (await get_media_info(out_path))[0] + if lpd == 0: + LOGGER.error( + f"Something went wrong while splitting, mostly file is corrupted. Path: {path}" + ) + break + if duration == lpd: + LOGGER.warning( + f"This file has been split with default stream and audio, so you will only see one part with less size from the original one because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {path}" + ) + break + if lpd <= 3: + await remove(out_path) + break + + start_time += lpd - 3 + i += 1 + else: + out_path = f"{dirpath}/{file_}." + async with subprocess_lock: + if listener.isCancelled: + return False + + listener.suproc = await create_subprocess_exec( + "split", + "--numeric-suffixes=1", + "--suffix-length=3", + f"--bytes={split_size}", + path, + out_path, + stderr=PIPE, + ) + _, stderr = await listener.suproc.communicate() + + if listener.isCancelled: + return False + + code = listener.suproc.returncode + if code == -9: + listener.isCancelled = True + return False + if code != 0: + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + + LOGGER.error(f"{stderr}. 
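# Note on the split logic above: the loop is sized with the negative
# floor-division idiom parts = -(-size // split_size), which is ceiling
# division without importing math. Quick check (illustrative values):

    size, split_size = 10_500_000_000, 2_097_152_000  # ~10 GB into ~2 GB parts
    assert -(-size // split_size) == 6                # 5 full parts + remainder
    assert -(-4 // 2) == 2                            # exact multiples stay exact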
Split Document: {path}") + return True + + +async def createSampleVideo(listener, video_file, sample_duration, part_duration): + filter_complex = "" + dir, name = video_file.rsplit("/", 1) + output_file = f"{dir}/SAMPLE.{name}" + segments = [(0, part_duration)] + duration = (await get_media_info(video_file))[0] + remaining_duration = duration - (part_duration * 2) + parts = (sample_duration - (part_duration * 2)) // part_duration + time_interval = remaining_duration // parts + next_segment = time_interval + for _ in range(parts): + segments.append((next_segment, next_segment + part_duration)) + next_segment += time_interval + segments.append((duration - part_duration, duration)) + + for i, (start, end) in enumerate(segments): + filter_complex += ( + f"[0:v]trim=start={start}:end={end},setpts=PTS-STARTPTS[v{i}]; " + ) + filter_complex += ( + f"[0:a]atrim=start={start}:end={end},asetpts=PTS-STARTPTS[a{i}]; " + ) + + for i in range(len(segments)): + filter_complex += f"[v{i}][a{i}]" + + filter_complex += f"concat=n={len(segments)}:v=1:a=1[vout][aout]" + + cmd = [ + "xtra", + "-i", + video_file, + "-filter_complex", + filter_complex, + "-map", + "[vout]", + "-map", + "[aout]", + "-c:v", + "libx264", + "-c:a", + "aac", + "-threads", + f"{cpu_count() // 2}", + output_file, + ] + + if listener.isCancelled: + return False + listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE) + _, stderr = await listener.suproc.communicate() + if listener.isCancelled: + return False + code = listener.suproc.returncode + if code == -9: + listener.isCancelled = True + return False + if code == 0: + return output_file + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error( + f"{stderr}. Something went wrong while creating sample video, mostly file is corrupted. 
Path: {video_file}" + ) + if await aiopath.exists(output_file): + await remove(output_file) + return False diff --git a/bot/helper/ext_utils/shorteners.py b/bot/helper/ext_utils/shorteners.py deleted file mode 100644 index 12692c6b7..000000000 --- a/bot/helper/ext_utils/shorteners.py +++ /dev/null @@ -1,83 +0,0 @@ -from time import sleep -from base64 import b64encode -from random import choice, random, randrange -from urllib.parse import quote - -from urllib3 import disable_warnings -from cloudscraper import create_scraper - -from bot import LOGGER, shorteners_list - - -def short_url(longurl, attempt=0): - if not shorteners_list: - return longurl - if attempt >= 4: - return longurl - i = 0 if len(shorteners_list) == 1 else randrange(len(shorteners_list)) - _shorten_dict = shorteners_list[i] - _shortener = _shorten_dict["domain"] - _shortener_api = _shorten_dict["api_key"] - cget = create_scraper().request - disable_warnings() - try: - if "shorte.st" in _shortener: - headers = {"public-api-token": _shortener_api} - data = {"urlToShorten": quote(longurl)} - return cget( - "PUT", - "https://api.shorte.st/v1/data/url", - headers=headers, - data=data, - ).json()["shortenedUrl"] - if "linkvertise" in _shortener: - url = quote(b64encode(longurl.encode("utf-8"))) - linkvertise = [ - f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - ] - return choice(linkvertise) - if "bitly.com" in _shortener: - headers = {"Authorization": f"Bearer {_shortener_api}"} - return cget( - "POST", - "https://api-ssl.bit.ly/v4/shorten", - json={"long_url": longurl}, - headers=headers, - ).json()["link"] - if "ouo.io" in _shortener: - return cget( - "GET", - f"http://ouo.io/api/{_shortener_api}?s={longurl}", - verify=False, - ).text - if "cutt.ly" in _shortener: - return cget( - "GET", - f"http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}", - ).json()["url"]["shortLink"] - res = cget( - "GET", - f"https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}", - ).json() - shorted = res["shortenedUrl"] - if not shorted: - shrtco_res = cget( - "GET", f"https://api.shrtco.de/v2/shorten?url={quote(longurl)}" - ).json() - shrtco_link = shrtco_res["result"]["full_short_link"] - res = cget( - "GET", - f"https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}", - ).json() - shorted = res["shortenedUrl"] - if not shorted: - shorted = longurl - return shorted - except Exception as e: - LOGGER.error(e) - sleep(1) - attempt += 1 - return short_url(longurl, attempt) diff --git a/bot/helper/ext_utils/status_utils.py b/bot/helper/ext_utils/status_utils.py new file mode 100644 index 000000000..baa15d41d --- /dev/null +++ b/bot/helper/ext_utils/status_utils.py @@ -0,0 +1,229 @@ +from html import escape +from time import time +from asyncio import iscoroutinefunction +from contextlib import suppress + +from psutil import disk_usage + +from bot import ( + DOWNLOAD_DIR, + task_dict, + status_dict, + botStartTime, + task_dict_lock, +) +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.telegram_helper.button_build import ButtonMaker + +SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB"] + + +class MirrorStatus: + STATUS_UPLOADING_TG = "Uploading to Telegram" + STATUS_UPLOADING_GD = "Uploading to Gdrive" + STATUS_UPLOADING_RC = 
"Uploading to Rclone" + STATUS_DOWNLOADING_TG = "Downloading from Telegram" + STATUS_DOWNLOADING_MEGA = "Downloading from Mega" + STATUS_DOWNLOADING_GD = "Downloading from Gdrive" + STATUS_DOWNLOADING_A = "Downloading with Aria" + STATUS_DOWNLOADING_YT = "Downloading with yt-dlp" + STATUS_DOWNLOADING_Q = "Downloading with qBitTorrent" + STATUS_DOWNLOADING_RC = "Downloading from Rclone" + STATUS_CLONING_GD = "Cloning to Gdrive" + STATUS_CLONING_RC = "Cloning to Rclone" + STATUS_QUEUEDL = "Download is pending" + STATUS_QUEUEUP = "Upload is pending" + STATUS_PAUSED = "Paused" + STATUS_ARCHIVING = "Archiving with p7zip" + STATUS_EXTRACTING = "Extracting with p7zip" + STATUS_SPLITTING = "Splitting with p7zip" + STATUS_CHECKING = "CheckUp" + STATUS_SEEDING = "Seeding torrent" + STATUS_SAMVID = "Generating sample video" + STATUS_CONVERTING = "Converting format" + STATUS_METADATA = "Metadata modifying" + + +async def getTaskByGid(gid: str): + async with task_dict_lock: + for task in task_dict.values(): + if hasattr(task, "seeding"): + await sync_to_async(task.update) + if task.gid().startswith(gid): + return task + return None + + +def getSpecificTasks(status, userId): + if status == "All": + if userId: + return [tk for tk in task_dict.values() if tk.listener.userId == userId] + return list(task_dict.values()) + if userId: + return [ + tk + for tk in task_dict.values() + if tk.listener.userId == userId + and ( + (st := tk.status()) + and st == status + or status == MirrorStatus.STATUS_DOWNLOADING + ) + ] + return [ + tk + for tk in task_dict.values() + if (st := tk.status()) + and st == status + or status == MirrorStatus.STATUS_DOWNLOADING + ] + + +async def getAllTasks(req_status: str, userId): + async with task_dict_lock: + return await sync_to_async(getSpecificTasks, req_status, userId) + + +def get_readable_file_size(size_in_bytes: int): + if size_in_bytes is None: + return "0B" + index = 0 + while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1: + size_in_bytes /= 1024 + index += 1 + return ( + f"{size_in_bytes:.2f}{SIZE_UNITS[index]}" + if index > 0 + else f"{size_in_bytes:.2f}B" + ) + + +def get_readable_time(seconds, full_time=False): + periods = [ + ("millennium", 31536000000), + ("century", 3153600000), + ("decade", 315360000), + ("year", 31536000), + ("month", 2592000), + ("week", 604800), + ("day", 86400), + ("hour", 3600), + ("minute", 60), + ("second", 1), + ] + result = "" + for period_name, period_seconds in periods: + if seconds >= period_seconds: + period_value, seconds = divmod(seconds, period_seconds) + plural_suffix = "s" if period_value > 1 else "" + result += f"{int(period_value)} {period_name}{plural_suffix} " + if not full_time: + break + return result.strip() + + +def time_to_seconds(time_duration): + hours, minutes, seconds = map(int, time_duration.split(":")) + return hours * 3600 + minutes * 60 + seconds + + +def speed_string_to_bytes(size_text: str): + size = 0 + size_text = size_text.lower() + if "k" in size_text: + size += float(size_text.split("k")[0]) * 1024 + elif "m" in size_text: + size += float(size_text.split("m")[0]) * 1048576 + elif "g" in size_text: + size += float(size_text.split("g")[0]) * 1073741824 + elif "t" in size_text: + size += float(size_text.split("t")[0]) * 1099511627776 + elif "b" in size_text: + size += float(size_text.split("b")[0]) + return size + + +def progress_bar(pct): + if isinstance(pct, str): + pct = float(pct.strip("%")) + p = min(max(pct, 0), 100) + cFull = int((p + 5) // 10) + p_str = "●" * cFull + p_str += "○" * (10 - 
cFull) + return p_str + + +def source(self): + return ( + sender_chat.title + if (sender_chat := self.message.sender_chat) + else self.message.from_user.username or self.message.from_user.id + ) + + +async def get_readable_message(sid, is_user, page_no=1, status="All", page_step=1): + msg = "" + button = None + + tasks = await sync_to_async(getSpecificTasks, status, sid if is_user else None) + + STATUS_LIMIT = 4 + tasks_no = len(tasks) + pages = (max(tasks_no, 1) + STATUS_LIMIT - 1) // STATUS_LIMIT + if page_no > pages: + page_no = (page_no - 1) % pages + 1 + status_dict[sid]["page_no"] = page_no + elif page_no < 1: + page_no = pages - (abs(page_no) % pages) + status_dict[sid]["page_no"] = page_no + start_position = (page_no - 1) * STATUS_LIMIT + + for index, task in enumerate( + tasks[start_position : STATUS_LIMIT + start_position], start=1 + ): + tstatus = await sync_to_async(task.status) if status == "All" else status + msg += f"{index + start_position}. {tstatus}:\n" + msg += f"{escape(str(task.name()))}" + if tstatus not in [ + MirrorStatus.STATUS_SPLITTING, + MirrorStatus.STATUS_SEEDING, + MirrorStatus.STATUS_SAMVID, + MirrorStatus.STATUS_CONVERTING, + MirrorStatus.STATUS_QUEUEUP, + MirrorStatus.STATUS_METADATA, + ]: + progress = ( + await task.progress() + if iscoroutinefunction(task.progress) + else task.progress() + ) + msg += f"\n{progress_bar(progress)} {progress}" + msg += f"\n{task.processed_bytes()} of {task.size()}" + msg += f"\nSpeed: {task.speed()}\nEstimated: {task.eta()}" + if hasattr(task, "seeders_num"): + with suppress(Exception): + msg += f"\nSeeders: {task.seeders_num()} Leechers: {task.leechers_num()}" + elif tstatus == MirrorStatus.STATUS_SEEDING: + msg += f"\nSize: {task.size()}" + msg += f"\nSpeed: {task.seed_speed()}" + msg += f"\nUploaded: {task.uploaded_bytes()}" + msg += f"\nRatio: {task.ratio()}" + else: + msg += f"\nSize: {task.size()}" + msg += f"\nElapsed: {get_readable_time(time() - task.message.date.timestamp())}" + msg += f"\nBy: {source(task.listener)}" + msg += f"\n/stop_{task.gid()[:7]}\n\n" + + if len(msg) == 0: + if status == "All": + return None, None + msg = f"No Active {status} Tasks!\n\n" + if tasks_no > STATUS_LIMIT: + buttons = ButtonMaker() + msg += f"\nPage: {page_no}/{pages} | Tasks: {tasks_no}" + buttons.callback("Prev", f"status {sid} pre", position="header") + buttons.callback("Next", f"status {sid} nex", position="header") + button = buttons.menu(8) + msg += f"\nFree disk: {get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)}" + msg += f"\nBot uptime: {get_readable_time(time() - botStartTime)}" + return msg, button diff --git a/bot/helper/ext_utils/task_manager.py b/bot/helper/ext_utils/task_manager.py index 9192c2391..d04c7c0a7 100644 --- a/bot/helper/ext_utils/task_manager.py +++ b/bot/helper/ext_utils/task_manager.py @@ -1,84 +1,104 @@ -from asyncio import Event +from asyncio import Event, sleep from bot import ( LOGGER, - OWNER_ID, queued_dl, queued_up, - user_data, config_dict, - download_dict, non_queued_dl, non_queued_up, queue_dict_lock, ) -from bot.helper.ext_utils.bot_utils import ( - sync_to_async, - get_user_tasks, - checking_access, - get_telegraph_list, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import get_base_name, check_storage_threshold -from bot.helper.telegram_helper.message_utils import BotPm_check, isAdmin, forcesub -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper +from bot.helper.ext_utils.bot_utils import sync_to_async, get_telegraph_list +from 
bot.helper.ext_utils.files_utils import get_base_name +from bot.helper.ext_utils.links_utils import is_gdrive_id +from bot.helper.mirror_leech_utils.gdrive_utils.search import gdSearch -async def stop_duplicate_check(name, listener): +async def stop_duplicate_check(listener): if ( - not config_dict["STOP_DUPLICATE"] + isinstance(listener.upDest, int) or listener.is_leech - or listener.upPath != "gd" or listener.select + or not is_gdrive_id(listener.upDest) + or (listener.upDest.startswith("mtp:") and listener.stopDuplicate) + or not listener.stopDuplicate + or listener.same_dir ): return False, None + + name = listener.name LOGGER.info(f"Checking File/Folder if already in Drive: {name}") + if listener.compress: - name = f"{name}.zip" + name = f"{name}.7z" elif listener.extract: try: name = get_base_name(name) except Exception: name = None + if name is not None: telegraph_content, contents_no = await sync_to_async( - GoogleDriveHelper().drive_list, name, stopDup=True + gdSearch(stopDup=True, noMulti=listener.isClone).drive_list, + name, + listener.upDest, + listener.userId, ) if telegraph_content: msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:" button = await get_telegraph_list(telegraph_content) return msg, button + return False, None -async def is_queued(uid): +async def check_running_tasks(listener, state="dl"): all_limit = config_dict["QUEUE_ALL"] - dl_limit = config_dict["QUEUE_DOWNLOAD"] + state_limit = ( + config_dict["QUEUE_DOWNLOAD"] + if state == "dl" + else config_dict["QUEUE_UPLOAD"] + ) event = None - added_to_queue = False - if all_limit or dl_limit: - async with queue_dict_lock: - dl = len(non_queued_dl) - up = len(non_queued_up) - if ( + is_over_limit = False + async with queue_dict_lock: + if state == "up" and listener.mid in non_queued_dl: + non_queued_dl.remove(listener.mid) + if all_limit or state_limit: + dl_count = len(non_queued_dl) + up_count = len(non_queued_up) + t_count = dl_count if state == "dl" else up_count + is_over_limit = ( all_limit - and dl + up >= all_limit - and (not dl_limit or dl >= dl_limit) - ) or (dl_limit and dl >= dl_limit): - added_to_queue = True + and dl_count + up_count >= all_limit + and (not state_limit or t_count >= state_limit) + ) or (state_limit and t_count >= state_limit) + if is_over_limit: event = Event() - queued_dl[uid] = event - return added_to_queue, event + if state == "dl": + queued_dl[listener.mid] = event + else: + queued_up[listener.mid] = event + if not is_over_limit: + if state == "up": + non_queued_up.add(listener.mid) + else: + non_queued_dl.add(listener.mid) + return is_over_limit, event -def start_dl_from_queued(uid): - queued_dl[uid].set() - del queued_dl[uid] +async def start_dl_from_queued(mid: int): + queued_dl[mid].set() + del queued_dl[mid] + await sleep(0.7) -def start_up_from_queued(uid): - queued_up[uid].set() - del queued_up[uid] + +async def start_up_from_queued(mid: int): + queued_up[mid].set() + del queued_up[mid] + await sleep(0.7) async def start_from_queued(): @@ -92,15 +112,15 @@ async def start_from_queued(): if all_ < all_limit: f_tasks = all_limit - all_ if queued_up and (not up_limit or up < up_limit): - for index, uid in enumerate(list(queued_up.keys()), start=1): + for index, mid in enumerate(list(queued_up.keys()), start=1): f_tasks = all_limit - all_ - start_up_from_queued(uid) + await start_up_from_queued(mid) f_tasks -= 1 if f_tasks == 0 or (up_limit and index >= up_limit - up): break if queued_dl and (not dl_limit or dl < dl_limit) and f_tasks != 0: 
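+ # f_tasks holds the global slots still free under QUEUE_ALL after promoting queued uploads; queued downloads are promoted only while it and QUEUE_DOWNLOAD allow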
- for index, uid in enumerate(list(queued_dl.keys()), start=1): - start_dl_from_queued(uid) + for index, mid in enumerate(list(queued_dl.keys()), start=1): + await start_dl_from_queued(mid) if (dl_limit and index >= dl_limit - dl) or index == f_tasks: break return @@ -110,150 +130,27 @@ async def start_from_queued(): up = len(non_queued_up) if queued_up and up < up_limit: f_tasks = up_limit - up - for index, uid in enumerate(list(queued_up.keys()), start=1): - start_up_from_queued(uid) + for index, mid in enumerate(list(queued_up.keys()), start=1): + await start_up_from_queued(mid) if index == f_tasks: break else: async with queue_dict_lock: if queued_up: - for uid in list(queued_up.keys()): - start_up_from_queued(uid) + for mid in list(queued_up.keys()): + await start_up_from_queued(mid) if dl_limit := config_dict["QUEUE_DOWNLOAD"]: async with queue_dict_lock: dl = len(non_queued_dl) if queued_dl and dl < dl_limit: f_tasks = dl_limit - dl - for index, uid in enumerate(list(queued_dl.keys()), start=1): - start_dl_from_queued(uid) + for index, mid in enumerate(list(queued_dl.keys()), start=1): + await start_dl_from_queued(mid) if index == f_tasks: break else: async with queue_dict_lock: if queued_dl: - for uid in list(queued_dl.keys()): - start_dl_from_queued(uid) - - -async def limit_checker( - size, - listener, - is_torrent=False, - is_mega=False, - is_drive_link=False, - is_ytdlp=False, - is_playlist=None, -): - LOGGER.info("Checking limit") - user_id = listener.message.from_user.id - if ( - user_id == OWNER_ID - or user_id in user_data - and user_data[user_id].get("is_sudo") - ): - return None - if await isAdmin(listener.message): - return None - limit_exceeded = "" - if listener.is_clone: - if clone_limit := config_dict["CLONE_LIMIT"]: - limit = clone_limit * 1024**3 - if size > limit: - limit_exceeded = f"Clone limit is {get_readable_file_size(limit)}." - elif is_mega: - if mega_limit := config_dict["MEGA_LIMIT"]: - limit = mega_limit * 1024**3 - if size > limit: - limit_exceeded = f"Mega limit is {get_readable_file_size(limit)}" - elif is_drive_link: - if gd_limit := config_dict["GDRIVE_LIMIT"]: - limit = gd_limit * 1024**3 - if size > limit: - limit_exceeded = ( - f"Google drive limit is {get_readable_file_size(limit)}" - ) - elif is_ytdlp: - if ytdlp_limit := config_dict["YTDLP_LIMIT"]: - limit = ytdlp_limit * 1024**3 - if size > limit: - limit_exceeded = f"Ytdlp limit is {get_readable_file_size(limit)}" - if ( - is_playlist != 0 - and (playlist_limit := config_dict["PLAYLIST_LIMIT"]) - and is_playlist > playlist_limit - ): - limit_exceeded = f"Playlist limit is {PLAYLIST_LIMIT}" - elif is_torrent: - if torrent_limit := config_dict["TORRENT_LIMIT"]: - limit = torrent_limit * 1024**3 - if size > limit: - limit_exceeded = f"Torrent limit is {get_readable_file_size(limit)}" - elif direct_limit := config_dict["DIRECT_LIMIT"]: - limit = direct_limit * 1024**3 - if size > limit: - limit_exceeded = f"Direct limit is {get_readable_file_size(limit)}" - if not limit_exceeded: - if (leech_limit := config_dict["LEECH_LIMIT"]) and listener.is_leech: - limit = leech_limit * 1024**3 - if size > limit: - limit_exceeded = f"Leech limit is {get_readable_file_size(limit)}" - if not listener.is_clone: - arch = any([listener.compress, listener.extract]) - limit = 3 * 1024**3 - acpt = await sync_to_async(check_storage_threshold, size, limit, arch) - if not acpt: - limit_exceeded = "You must leave 3GB free storage." 
- if limit_exceeded: - if size: - return f"{limit_exceeded}.\nYour file or folder size is {get_readable_file_size(size)}." - if is_playlist != 0: - return f"{limit_exceeded}.\nYour playlist has {is_playlist} files." - return None - return None - - -async def task_utils(message): - msg = [] - button = None - user_id = message.from_user.id - token = config_dict["TOKEN_TIMEOUT"] - admin = await isAdmin(message) - if message.chat.type != message.chat.type.BOT: - if ids := config_dict["FSUB_IDS"]: - _msg, button = await forcesub(message, ids, button) - if _msg: - msg.append(_msg) - if not token or ( - token - and ( - admin - or user_id == OWNER_ID - or (user_id in user_data and user_data[user_id].get("is_sudo")) - ) - ): - _msg, button = await BotPm_check(message, button) - if _msg: - msg.append(_msg) - if ( - user_id == OWNER_ID - or user_id in user_data - and user_data[user_id].get("is_sudo") - ): - return msg, button - if admin: - return msg, button - token_msg, button = await checking_access(message.from_user.id, button) - if token_msg is not None: - msg.append(token_msg) - if (bmax_tasks := config_dict["BOT_MAX_TASKS"]) and len( - download_dict - ) >= bmax_tasks: - msg.append( - f"Bot Max Tasks limit exceeded.\nBot max tasks limit is {bmax_tasks}.\nPlease wait for the completion of other tasks." - ) - if (maxtask := config_dict["USER_MAX_TASKS"]) and await get_user_tasks( - message.from_user.id, maxtask - ): - msg.append(f"Your tasks limit exceeded for {maxtask} tasks") - return msg, button + for mid in list(queued_dl.keys()): + await start_dl_from_queued(mid) diff --git a/bot/helper/ext_utils/telegraph_helper.py b/bot/helper/ext_utils/telegraph_helper.py index 9a1ae5fcf..9ba5ab569 100644 --- a/bot/helper/ext_utils/telegraph_helper.py +++ b/bot/helper/ext_utils/telegraph_helper.py @@ -4,32 +4,31 @@ from telegraph.aio import Telegraph from telegraph.exceptions import RetryAfterError -from bot import LOGGER, bot_loop +from bot import LOGGER class TelegraphHelper: - def __init__(self): - self.telegraph = Telegraph(domain="graph.org") - self.short_name = token_hex(4) - self.access_token = None - self.author_name = "Aeon" - self.author_url = "https://t.me/ProjectAeon" + def __init__(self, author_name=None, author_url=None): + self._telegraph = Telegraph(domain="graph.org") + self._author_name = author_name + self._author_url = author_url async def create_account(self): - await self.telegraph.create_account( - short_name=self.short_name, - author_name=self.author_name, - author_url=self.author_url, - ) - self.access_token = self.telegraph.get_access_token() - LOGGER.info("Creating Telegraph Account") + try: + await self._telegraph.create_account( + short_name=token_hex(4), + author_name=self._author_name, + author_url=self._author_url, + ) + except Exception as e: + LOGGER.error(f"Failed to create Telegraph Account: {e}") async def create_page(self, title, content): try: - return await self.telegraph.create_page( + return await self._telegraph.create_page( title=title, - author_name=self.author_name, - author_url=self.author_url, + author_name=self._author_name, + author_url=self._author_url, html_content=content, ) except RetryAfterError as st: @@ -41,11 +40,11 @@ async def create_page(self, title, content): async def edit_page(self, path, title, content): try: - return await self.telegraph.edit_page( + return await self._telegraph.edit_page( path=path, title=title, - author_name=self.author_name, - author_url=self.author_url, + author_name=self._author_name, + author_url=self._author_url, 
html_content=content, ) except RetryAfterError as st: @@ -73,9 +72,10 @@ async def edit_telegraph(self, path, telegraph_content): content += f' | Next' nxt_page += 1 await self.edit_page( - path=path[prev_page], title="Torrent Search", content=content + path=path[prev_page], + title="Torrent Search", + content=content, ) -telegraph = TelegraphHelper() -bot_loop.run_until_complete(telegraph.create_account()) +telegraph = TelegraphHelper("Aeon", "https://t.me/ProjectAeon") diff --git a/bot/helper/listeners/aria2_listener.py b/bot/helper/listeners/aria2_listener.py index 2187ee5ad..af6a44bbb 100644 --- a/bot/helper/listeners/aria2_listener.py +++ b/bot/helper/listeners/aria2_listener.py @@ -1,129 +1,64 @@ -import contextlib from time import time from asyncio import sleep +from contextlib import suppress from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove -from bot import LOGGER, aria2, config_dict, download_dict, download_dict_lock +from bot import LOGGER, Intervals, aria2, task_dict, config_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import ( new_thread, sync_to_async, - get_task_by_gid, - get_telegraph_list, bt_selection_buttons, ) -from bot.helper.ext_utils.files_utils import get_base_name, clean_unwanted -from bot.helper.ext_utils.task_manager import limit_checker +from bot.helper.ext_utils.files_utils import clean_unwanted +from bot.helper.ext_utils.status_utils import getTaskByGid +from bot.helper.ext_utils.task_manager import stop_duplicate_check from bot.helper.telegram_helper.message_utils import ( - delete_links, send_message, delete_message, - update_all_messages, + update_status_message, ) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_leech_utils.status_utils.aria2_status import Aria2Status @new_thread -async def __on_download_started(api, gid): +async def _on_download_started(api, gid): download = await sync_to_async(api.get_download, gid) if download.options.follow_torrent == "false": return if download.is_metadata: LOGGER.info(f"on_download_started: {gid} METADATA") await sleep(1) - if dl := await get_task_by_gid(gid): - listener = dl.listener() - if listener.select: + if task := await getTaskByGid(gid): + task.listener.isTorrent = True + if task.listener.select: metamsg = "Downloading Metadata, wait then you can select files. Use torrent file to avoid this wait." - meta = await send_message(listener.message, metamsg) + meta = await send_message(task.listener.message, metamsg) while True: await sleep(0.5) if download.is_removed or download.followed_by_ids: await delete_message(meta) break - download = download.live + await sync_to_async(download.update) return - LOGGER.info(f"Download Started: {download.name} - Gid: {gid}") - dl = None - if config_dict["STOP_DUPLICATE"]: - await sleep(1) - if dl is None: - dl = await get_task_by_gid(gid) - if dl: - if not hasattr(dl, "listener"): - LOGGER.warning( - f"on_download_start: {gid}. STOP_DUPLICATE didn't pass since download completed earlier!" 
- ) - return - listener = dl.listener() - if ( - not listener.is_leech - and not listener.select - and listener.upPath == "gd" - ): - download = await sync_to_async(api.get_download, gid) - if not download.is_torrent: - await sleep(3) - download = download.live - LOGGER.info("Checking File/Folder if already in Drive...") - name = download.name - if listener.compress: - name = f"{name}.zip" - elif listener.extract: - try: - name = get_base_name(name) - except Exception: - name = None - if name is not None: - telegraph_content, contents_no = await sync_to_async( - GoogleDriveHelper().drive_list, name, True - ) - if telegraph_content: - msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:" - button = await get_telegraph_list(telegraph_content) - await listener.onDownloadError(msg, button) - await sync_to_async( - api.remove, [download], force=True, files=True - ) - await delete_links(listener.message) - return + LOGGER.info(f"on_download_started: {download.name} - Gid: {gid}") await sleep(1) - if dl is None: - dl = await get_task_by_gid(gid) - if dl is not None: - if not hasattr(dl, "listener"): - LOGGER.warning( - f"on_download_start: {gid}. at Download limit didn't pass since download completed earlier!" - ) - return - listener = dl.listener() + + if task := await getTaskByGid(gid): download = await sync_to_async(api.get_download, gid) - download = download.live - if download.total_length == 0: - start_time = time() - while time() - start_time <= 15: - await sleep(0.5) - download = await sync_to_async(api.get_download, gid) - download = download.live - if download.followed_by_ids: - download = await sync_to_async( - api.get_download, download.followed_by_ids[0] - ) - if download.total_length > 0: - break - size = download.total_length - if limit_exceeded := await limit_checker( - size, listener, download.is_torrent - ): - await listener.onDownloadError(limit_exceeded) + await sleep(2) + await sync_to_async(download.update) + task.listener.name = download.name + msg, button = await stop_duplicate_check(task.listener) + if msg: + await task.listener.onDownloadError(msg, button) await sync_to_async(api.remove, [download], force=True, files=True) - await delete_links(listener.message) + return @new_thread -async def __on_download_complete(api, gid): +async def _on_download_complete(api, gid): try: download = await sync_to_async(api.get_download, gid) except Exception: @@ -133,53 +68,49 @@ if download.followed_by_ids: new_gid = download.followed_by_ids[0] LOGGER.info(f"Gid changed from {gid} to {new_gid}") - if dl := await get_task_by_gid(new_gid): - listener = dl.listener() - if config_dict["BASE_URL"] and listener.select: - if not dl.queued: + if task := await getTaskByGid(new_gid): + task.listener.isTorrent = True + if config_dict["BASE_URL"] and task.listener.select: + if not task.queued: await sync_to_async(api.client.force_pause, new_gid) - s_buttons = bt_selection_buttons(new_gid) + SBUTTONS = bt_selection_buttons(new_gid) msg = "Your download paused. Choose files then press Done Selecting button to start downloading." 
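+ # gid change means the metadata phase finished; with BASE_URL set, the real torrent stays force-paused until the user presses Done Selecting in the web file selector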
- await send_message(listener.message, msg, s_buttons) + await send_message(task.listener.message, msg, SBUTTONS) elif download.is_torrent: - if ( - (dl := await get_task_by_gid(gid)) - and hasattr(dl, "listener") - and dl.seeding - ): - LOGGER.info(f"Cancelling Seed: {download.name} on_download_complete") - listener = dl.listener() - await listener.onUploadError( - f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}" - ) - await sync_to_async(api.remove, [download], force=True, files=True) + if task := await getTaskByGid(gid): + task.listener.isTorrent = True + if hasattr(task, "seeding") and task.seeding: + LOGGER.info(f"Cancelling Seed: {download.name} on_download_complete") + await task.listener.onUploadError( + f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}" + ) + await sync_to_async(api.remove, [download], force=True, files=True) else: LOGGER.info(f"on_download_complete: {download.name} - Gid: {gid}") - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.on_download_complete() + if task := await getTaskByGid(gid): + await task.listener.on_download_complete() + if Intervals["stopAll"]: + return await sync_to_async(api.remove, [download], force=True, files=True) @new_thread -async def __on_bt_dl_complete(api, gid): +async def _onBtDownloadComplete(api, gid): seed_start_time = time() await sleep(1) download = await sync_to_async(api.get_download, gid) - if download.options.follow_torrent == "false": - return LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}") - if dl := await get_task_by_gid(gid): - listener = dl.listener() - if listener.select: + if task := await getTaskByGid(gid): + task.listener.isTorrent = True + if task.listener.select: res = download.files for file_o in res: f_path = file_o.path if not file_o.selected and await aiopath.exists(f_path): - with contextlib.suppress(Exception): - await aioremove(f_path) + with suppress(Exception): + await remove(f_path) await clean_unwanted(download.dir) - if listener.seed: + if task.listener.seed: try: await sync_to_async( api.set_options, {"max-upload-limit": "0"}, [download] @@ -193,65 +124,67 @@ await sync_to_async(api.client.force_pause, gid) except Exception as e: LOGGER.error(f"{e} GID: {gid}") - await listener.on_download_complete() - download = download.live - if listener.seed: + await task.listener.on_download_complete() + if Intervals["stopAll"]: + return + await sync_to_async(download.update) + if task.listener.seed: if download.is_complete: - if dl := await get_task_by_gid(gid): + if task := await getTaskByGid(gid): LOGGER.info(f"Cancelling Seed: {download.name}") - await listener.onUploadError( - f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}" + await task.listener.onUploadError( + f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}" ) await sync_to_async( api.remove, [download], force=True, files=True ) - else: - async with download_dict_lock: - if listener.uid not in download_dict: + elif not task.listener.isCancelled: + async with task_dict_lock: + if task.listener.mid not in task_dict: await sync_to_async( api.remove, [download], force=True, files=True ) return - download_dict[listener.uid] = Aria2Status(gid, listener, True) - download_dict[listener.uid].start_time = seed_start_time + task_dict[task.listener.mid] = Aria2Status( + task.listener, gid, True + ) + task_dict[task.listener.mid].start_time = seed_start_time 
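+ # swap the completed task's task_dict entry for a seeding Aria2Status (third argument True marks the seed phase) so the status message keeps tracking it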
LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}") - await update_all_messages() + await update_status_message(task.listener.message.chat.id) + else: + await sync_to_async(api.remove, [download], force=True, files=True) else: await sync_to_async(api.remove, [download], force=True, files=True) @new_thread -async def __on_download_stopped(_, gid): - await sleep(6) - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.onDownloadError("Dead torrent!") +async def _onDownloadStopped(api, gid): + await sleep(4) + if task := await getTaskByGid(gid): + await task.listener.onDownloadError("Dead torrent!") @new_thread -async def __on_download_error(api, gid): +async def _onDownloadError(api, gid): LOGGER.info(f"onDownloadError: {gid}") error = "None" - try: + with suppress(Exception): download = await sync_to_async(api.get_download, gid) if download.options.follow_torrent == "false": return error = download.error_message LOGGER.info(f"Download Error: {error}") - except Exception: - pass - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.onDownloadError(error) + if task := await getTaskByGid(gid): + await task.listener.onDownloadError(error) def start_aria2_listener(): aria2.listen_to_notifications( threaded=False, - on_download_start=__on_download_started, - on_download_error=__on_download_error, - on_download_stop=__on_download_stopped, - on_download_complete=__on_download_complete, - on_bt_download_complete=__on_bt_dl_complete, + on_download_start=_on_download_started, + on_download_error=_onDownloadError, + on_download_stop=_onDownloadStopped, + on_download_complete=_on_download_complete, + on_bt_download_complete=_onBtDownloadComplete, timeout=60, ) diff --git a/bot/helper/listeners/direct_listener.py b/bot/helper/listeners/direct_listener.py index 7e0aaae9e..b23dfce7f 100644 --- a/bot/helper/listeners/direct_listener.py +++ b/bot/helper/listeners/direct_listener.py @@ -5,78 +5,76 @@ class DirectListener: - def __init__(self, foldername, total_size, path, listener, a2c_opt): - self.__path = path - self.__listener = listener - self.__is_cancelled = False - self.__a2c_opt = a2c_opt - self.task = None - self.name = foldername - self.total_size = total_size - self.proc_bytes = 0 - self.failed = 0 + def __init__(self, path, listener, a2c_opt): + self.listener = listener + self._path = path + self._a2c_opt = a2c_opt + self._proc_bytes = 0 + self._failed = 0 + self.download_task = None + self.name = self.listener.name @property def processed_bytes(self): - if self.task: - return self.proc_bytes + self.task.completed_length - return self.proc_bytes + if self.download_task: + return self._proc_bytes + self.download_task.completed_length + return self._proc_bytes @property def speed(self): - return self.task.download_speed if self.task else 0 + return self.download_task.download_speed if self.download_task else 0 def download(self, contents): self.is_downloading = True for content in contents: - if self.__is_cancelled: + if self.listener.isCancelled: break if content["path"]: - self.__a2c_opt["dir"] = f"{self.__path}/{content['path']}" + self._a2c_opt["dir"] = f"{self._path}/{content['path']}" else: - self.__a2c_opt["dir"] = self.__path + self._a2c_opt["dir"] = self._path filename = content["filename"] - self.__a2c_opt["out"] = filename + self._a2c_opt["out"] = filename try: - self.task = aria2.add_uris( - [content["url"]], self.__a2c_opt, position=0 + self.download_task = aria2.add_uris( + [content["url"]], self._a2c_opt, position=0 ) 
except Exception as e: - self.failed += 1 + self._failed += 1 LOGGER.error(f"Unable to download {filename} due to: {e}") continue - self.task = self.task.live + self.download_task = self.download_task.live while True: - if self.__is_cancelled: - if self.task: - self.task.remove(True, True) + if self.listener.isCancelled: + if self.download_task: + self.download_task.remove(True, True) break - self.task = self.task.live - if error_message := self.task.error_message: - self.failed += 1 + self.download_task = self.download_task.live + if error_message := self.download_task.error_message: + self._failed += 1 LOGGER.error( - f"Unable to download {self.task.name} due to: {error_message}" + f"Unable to download {self.download_task.name} due to: {error_message}" ) - self.task.remove(True, True) + self.download_task.remove(True, True) break - if self.task.is_complete: - self.proc_bytes += self.task.total_length - self.task.remove(True) + if self.download_task.is_complete: + self._proc_bytes += self.download_task.total_length + self.download_task.remove(True) break sleep(1) - self.task = None + self.download_task = None - if self.__is_cancelled: + if self.listener.isCancelled: return - if self.failed == len(contents): + if self._failed == len(contents): async_to_sync( - self.__listener.onDownloadError, "All files are failed to download!" + self.listener.onDownloadError, "All files failed to download!" ) return - async_to_sync(self.__listener.on_download_complete) + async_to_sync(self.listener.on_download_complete) - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Download: {self.name}") - await self.__listener.onDownloadError("Download Cancelled by User!") - if self.task: - await sync_to_async(self.task.remove, force=True, files=True) + async def cancel_task(self): + self.listener.isCancelled = True + LOGGER.info(f"Cancelling Download: {self.listener.name}") + await self.listener.onDownloadError("Download Cancelled by User!") + if self.download_task: + await sync_to_async(self.download_task.remove, force=True, files=True) diff --git a/bot/helper/listeners/qbit_listener.py b/bot/helper/listeners/qbit_listener.py index 79952ae3e..2b36bbc65 100644 --- a/bot/helper/listeners/qbit_listener.py +++ b/bot/helper/listeners/qbit_listener.py @@ -1,30 +1,30 @@ from time import time -from asyncio import sleep +from asyncio import sleep, gather +from contextlib import suppress + +from aiofiles.os import path as aiopath +from aiofiles.os import remove from bot import ( LOGGER, - QbInterval, + TORRENT_TIMEOUT, + Intervals, QbTorrents, bot_loop, - config_dict, + task_dict, xnox_client, - download_dict, + task_dict_lock, qb_listener_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - get_task_by_gid, - get_readable_time, ) +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async from bot.helper.ext_utils.files_utils import clean_unwanted -from bot.helper.ext_utils.task_manager import limit_checker, stop_duplicate_check -from bot.helper.telegram_helper.message_utils import update_all_messages +from bot.helper.ext_utils.status_utils import getTaskByGid, get_readable_time +from bot.helper.ext_utils.task_manager import stop_duplicate_check +from bot.helper.telegram_helper.message_utils import update_status_message from bot.helper.mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus -async def __remove_torrent(hash_, tag): +async def _remove_torrent(hash_, tag): await sync_to_async( 
xnox_client.torrents_delete, torrent_hashes=hash_, delete_files=True ) @@ -35,105 +35,101 @@ async def __remove_torrent(hash_, tag): @new_task -async def __on_download_error(err, tor, button=None): +async def _onDownloadError(err, tor, button=None): LOGGER.info(f"Cancelling Download: {tor.name}") ext_hash = tor.hash - download = await get_task_by_gid(ext_hash[:8]) - listener = download.listener() - await listener.onDownloadError(err, button) - await sync_to_async(xnox_client.torrents_pause, torrent_hashes=ext_hash) + task = await getTaskByGid(ext_hash[:12]) + await gather( + task.listener.onDownloadError(err, button), + sync_to_async(xnox_client.torrents_pause, torrent_hashes=ext_hash), + ) await sleep(0.3) - await __remove_torrent(ext_hash, tor.tags) + await _remove_torrent(ext_hash, tor.tags) @new_task -async def __on_seed_finish(tor): +async def _onSeedFinish(tor): ext_hash = tor.hash LOGGER.info(f"Cancelling Seed: {tor.name}") - download = await get_task_by_gid(ext_hash[:8]) - if not hasattr(download, "seeders_num"): + task = await getTaskByGid(ext_hash[:12]) + if not hasattr(task, "seeders_num"): return - listener = download.listener() - msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time, True)}" - await listener.onUploadError(msg) - await __remove_torrent(ext_hash, tor.tags) + msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}" + await task.listener.onUploadError(msg) + await _remove_torrent(ext_hash, tor.tags) @new_task -async def __stop_duplicate(tor): - download = await get_task_by_gid(tor.hash[:8]) - if not hasattr(download, "listener"): +async def _stop_duplicate(tor): + task = await getTaskByGid(tor.hash[:12]) + if not hasattr(task, "listener"): return - listener = download.listener() - name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0] - msg, button = await stop_duplicate_check(name, listener) - if msg: - __on_download_error(msg, tor, button) + if task.listener.stopDuplicate: + task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0] + msg, button = await stop_duplicate_check(task.listener) + if msg: + _onDownloadError(msg, tor, button) @new_task -async def __size_checked(tor): - download = await get_task_by_gid(tor.hash[:8]) - if hasattr(download, "listener"): - listener = download.listener() - size = tor.size - if limit_exceeded := await limit_checker(size, listener, True): - await __on_download_error(limit_exceeded, tor) - - -@new_task -async def __on_download_complete(tor): +async def _on_download_complete(tor): ext_hash = tor.hash tag = tor.tags - await sleep(2) - download = await get_task_by_gid(ext_hash[:8]) - listener = download.listener() - if not listener.seed: + task = await getTaskByGid(ext_hash[:12]) + if not task.listener.seed: await sync_to_async(xnox_client.torrents_pause, torrent_hashes=ext_hash) - if listener.select: - await clean_unwanted(listener.dir) - await listener.on_download_complete() - if listener.seed: - async with download_dict_lock: - if listener.uid in download_dict: + if task.listener.select: + await clean_unwanted(task.listener.dir) + path = tor.content_path.rsplit("/", 1)[0] + res = await sync_to_async(xnox_client.torrents_files, torrent_hash=ext_hash) + for f in res: + if f.priority == 0 and await aiopath.exists(f"{path}/{f.name}"): + with suppress(Exception): + await remove(f"{path}/{f.name}") + await task.listener.on_download_complete() + if Intervals["stopAll"]: + return + if task.listener.seed and 
not task.listener.isCancelled: + async with task_dict_lock: + if task.listener.mid in task_dict: removed = False - download_dict[listener.uid] = QbittorrentStatus(listener, True) + task_dict[task.listener.mid] = QbittorrentStatus(task.listener, True) else: removed = True if removed: - await __remove_torrent(ext_hash, tag) + await _remove_torrent(ext_hash, tag) return async with qb_listener_lock: if tag in QbTorrents: QbTorrents[tag]["seeding"] = True else: return - await update_all_messages() + await update_status_message(task.listener.message.chat.id) LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}") else: - await __remove_torrent(ext_hash, tag) + await _remove_torrent(ext_hash, tag) -async def __qb_listener(): +async def _qb_listener(): while True: async with qb_listener_lock: try: - if len(await sync_to_async(xnox_client.torrents_info)) == 0: - QbInterval.clear() + torrents = await sync_to_async(xnox_client.torrents_info) + if len(torrents) == 0: + Intervals["qb"] = "" break - for tor_info in await sync_to_async(xnox_client.torrents_info): + for tor_info in torrents: tag = tor_info.tags if tag not in QbTorrents: continue state = tor_info.state if state == "metaDL": - TORRENT_TIMEOUT = config_dict["TORRENT_TIMEOUT"] QbTorrents[tag]["stalled_time"] = time() if ( TORRENT_TIMEOUT and time() - tor_info.added_on >= TORRENT_TIMEOUT ): - __on_download_error("Dead Torrent!", tor_info) + _onDownloadError("Dead Torrent!", tor_info) else: await sync_to_async( xnox_client.torrents_reannounce, @@ -141,17 +137,10 @@ async def __qb_listener(): ) elif state == "downloading": QbTorrents[tag]["stalled_time"] = time() - if ( - config_dict["STOP_DUPLICATE"] - and not QbTorrents[tag]["stop_dup_check"] - ): + if not QbTorrents[tag]["stop_dup_check"]: QbTorrents[tag]["stop_dup_check"] = True - __stop_duplicate(tor_info) - if not QbTorrents[tag]["size_checked"]: - QbTorrents[tag]["size_checked"] = True - __size_checked(tor_info) + _stop_duplicate(tor_info) elif state == "stalledDL": - TORRENT_TIMEOUT = config_dict["TORRENT_TIMEOUT"] if ( not QbTorrents[tag]["rechecked"] and 0.99989999999999999 < tor_info.progress < 1 @@ -170,7 +159,7 @@ async def __qb_listener(): and time() - QbTorrents[tag]["stalled_time"] >= TORRENT_TIMEOUT ): - __on_download_error("Dead Torrent!", tor_info) + _onDownloadError("Dead Torrent!", tor_info) else: await sync_to_async( xnox_client.torrents_reannounce, @@ -182,7 +171,7 @@ async def __qb_listener(): torrent_hashes=tor_info.hash, ) elif state == "error": - __on_download_error( + _onDownloadError( "No enough space for this torrent on device", tor_info ) elif ( @@ -192,13 +181,14 @@ async def __qb_listener(): not in ["checkingUP", "checkingDL", "checkingResumeData"] ): QbTorrents[tag]["uploaded"] = True - __on_download_complete(tor_info) + _on_download_complete(tor_info) elif ( state in ["pausedUP", "pausedDL"] and QbTorrents[tag]["seeding"] ): QbTorrents[tag]["seeding"] = False - __on_seed_finish(tor_info) + _onSeedFinish(tor_info) + await sleep(0.5) except Exception as e: LOGGER.error(str(e)) await sleep(3) @@ -212,8 +202,6 @@ async def on_download_start(tag): "rechecked": False, "uploaded": False, "seeding": False, - "size_checked": False, } - if not QbInterval: - periodic = bot_loop.create_task(__qb_listener()) - QbInterval.append(periodic) + if not Intervals["qb"]: + Intervals["qb"] = bot_loop.create_task(_qb_listener()) diff --git a/bot/helper/listeners/task_listener.py b/bot/helper/listeners/task_listener.py new file mode 100644 index 000000000..3c7bddeb2 --- 
/dev/null +++ b/bot/helper/listeners/task_listener.py @@ -0,0 +1,399 @@ +from html import escape +from asyncio import sleep, gather +from contextlib import suppress + +from aioshutil import move +from aiofiles.os import path as aiopath +from aiofiles.os import remove, listdir, makedirs + +from bot import ( + LOGGER, + DOWNLOAD_DIR, + Intervals, + aria2, + queued_dl, + queued_up, + task_dict, + config_dict, + non_queued_dl, + non_queued_up, + task_dict_lock, + queue_dict_lock, +) +from bot.helper.common import TaskConfig +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.files_utils import ( + join_files, + clean_target, + get_path_size, + clean_download, +) +from bot.helper.ext_utils.links_utils import is_gdrive_id +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.ext_utils.task_manager import start_from_queued, check_running_tasks +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + send_message, + delete_status, + update_status_message, +) +from bot.helper.mirror_leech_utils.telegram_uploader import TgUploader +from bot.helper.mirror_leech_utils.gdrive_utils.upload import gdUpload +from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper +from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus +from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus +from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus +from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus + + +class TaskListener(TaskConfig): + def __init__(self): + super().__init__() + + async def clean(self): + with suppress(Exception): + if st := Intervals["status"]: + for intvl in list(st.values()): + intvl.cancel() + Intervals["status"].clear() + await gather(sync_to_async(aria2.purge), delete_status()) + + def rm_from_sm_dir(self): + if self.same_dir and self.mid in self.same_dir["tasks"]: + self.same_dir["tasks"].remove(self.mid) + self.same_dir["total"] -= 1 + + async def on_download_start(self): + # Feature will be added in the future + pass + + async def on_download_complete(self): + multi_links = False + if self.same_dir and self.mid in self.same_dir["tasks"]: + while not ( + self.same_dir["total"] in [1, 0] + or (self.same_dir["total"] > 1 + and len(self.same_dir["tasks"]) > 1) + ): + await sleep(0.5) + + async with task_dict_lock: + if ( + self.same_dir + and self.same_dir["total"] > 1 + and self.mid in self.same_dir["tasks"] + ): + self.same_dir["tasks"].remove(self.mid) + self.same_dir["total"] -= 1 + folder_name = self.same_dir["name"] + spath = f"{self.dir}{folder_name}" + des_path = f"{DOWNLOAD_DIR}{next(iter(self.same_dir['tasks']))}{folder_name}" + await makedirs(des_path, exist_ok=True) + for item in await listdir(spath): + if item.endswith((".aria2", ".!qB")): + continue + item_path = f"{self.dir}{folder_name}/{item}" + if item in await listdir(des_path): + await move(item_path, f"{des_path}/{self.mid}-{item}") + else: + await move(item_path, f"{des_path}/{item}") + multi_links = True + download = task_dict[self.mid] + self.name = download.name() + gid = download.gid() + LOGGER.info(f"Download completed: {self.name}") + + if not (self.isTorrent or self.isQbit): + self.seed = False + + unwanted_files = [] + unwanted_files_size = [] + files_to_delete = [] + + if multi_links: + await self.onUploadError("Downloaded! 
Waiting for other tasks...") + return + + if not await aiopath.exists(f"{self.dir}/{self.name}"): + try: + files = await listdir(self.dir) + self.name = files[-1] + if self.name == "yt-dlp-thumb": + self.name = files[0] + except Exception as e: + await self.onUploadError(str(e)) + return + + up_path = f"{self.dir}/{self.name}" + self.size = await get_path_size(up_path) + if not config_dict["QUEUE_ALL"]: + async with queue_dict_lock: + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + await start_from_queued() + + if self.join and await aiopath.isdir(up_path): + await join_files(up_path) + + if self.extract: + up_path = await self.proceedExtract(up_path, gid) + if self.isCancelled: + return + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + up_path = await self.remove_website(up_path) + self.name = up_path.rsplit("/", 1)[1] + if self.nameSub: + up_path = await self.substitute(up_path) + if self.isCancelled: + return + self.name = up_path.rsplit("/", 1)[1] + + if self.metadata: + up_path = await self.proceedMetadata(up_path, gid) + if self.isCancelled: + return + + if self.screenShots: + up_path = await self.generateScreenshots(up_path) + if self.isCancelled: + return + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.convertAudio or self.convertVideo: + up_path = await self.convertMedia( + up_path, gid, unwanted_files, unwanted_files_size, files_to_delete + ) + if self.isCancelled: + return + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.sampleVideo: + up_path = await self.generateSampleVideo( + up_path, gid, unwanted_files, files_to_delete + ) + if self.isCancelled: + return + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.compress: + up_path = await self.proceedCompress( + up_path, gid, unwanted_files, files_to_delete + ) + if self.isCancelled: + return + + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.is_leech and not self.compress: + await self.proceedSplit(up_dir, unwanted_files_size, unwanted_files, gid) + if self.isCancelled: + return + + add_to_queue, event = await check_running_tasks(self, "up") + await start_from_queued() + if add_to_queue: + LOGGER.info(f"Added to Queue/Upload: {self.name}") + async with task_dict_lock: + task_dict[self.mid] = QueueStatus(self, gid, "Up") + await event.wait() + if self.isCancelled: + return + async with queue_dict_lock: + non_queued_up.add(self.mid) + LOGGER.info(f"Start from Queued/Upload: {self.name}") + + self.size = await get_path_size(up_dir) + for s in unwanted_files_size: + self.size -= s + + if self.is_leech: + LOGGER.info(f"Leech Name: {self.name}") + tg = TgUploader(self, up_dir) + async with task_dict_lock: + task_dict[self.mid] = TelegramStatus(self, tg, gid, "up") + await gather( + update_status_message(self.message.chat.id), + tg.upload(unwanted_files, files_to_delete), + ) + elif is_gdrive_id(self.upDest): + LOGGER.info(f"Gdrive Upload Name: {self.name}") + drive = gdUpload(self, up_path) + async with task_dict_lock: + task_dict[self.mid] = GdriveStatus(self, drive, gid, "up") + await gather( + update_status_message(self.message.chat.id), + sync_to_async(drive.upload, unwanted_files, files_to_delete), + ) + else: + LOGGER.info(f"Rclone Upload Name: {self.name}") + RCTransfer = RcloneTransferHelper(self) + async with task_dict_lock: + task_dict[self.mid] = RcloneStatus(self, 
RCTransfer, gid, "up") + await gather( + update_status_message(self.message.chat.id), + RCTransfer.upload(up_path, unwanted_files, files_to_delete), + ) + + async def onUploadComplete( + self, link, files, folders, mime_type, rclonePath="", dir_id="" + ): + msg = f"Name:{escape(self.name)}
\n\nSize: {get_readable_file_size(self.size)}" + done_msg = f"{self.tag}\nYour task is complete\nPlease check your inbox." + if self.is_leech: + msg += f"\nTotal Files: {folders}" + if mime_type != 0: + msg += f"\nCorrupted Files: {mime_type}" + msg += f"\nBy: {self.tag}\nUid: {self.userId}\n\n" + if not files: + await send_message(self.message, msg) + else: + fmsg = "" + for index, (link, name) in enumerate(files.items(), start=1): + fmsg += f"{index}. {name}\n" + if len(fmsg.encode() + msg.encode()) > 4000: + await send_message( + self.userId, + f"{msg}{fmsg}", + ) + if config_dict["LOG_CHAT"]: + await send_message( + config_dict["LOG_CHAT"], + f"{msg}{fmsg}", + ) + await sleep(1) + fmsg = "" + if fmsg != "": + await send_message( + self.userId, + f"{msg}{fmsg}", + ) + if config_dict["LOG_CHAT"]: + await send_message( + config_dict["LOG_CHAT"], + f"{msg}{fmsg}", + ) + await send_message(self.message, done_msg) + else: + if mime_type == "Folder": + msg += f"\nSubFolders: {folders}" + msg += f"\nFiles: {files}" + if link or rclonePath and not self.privateLink: + buttons = ButtonMaker() + if link: + buttons.url("Cloud Link", link) + else: + msg += f"\n\nPath:{rclonePath}
" + if not rclonePath and dir_id: + INDEX_URL = "" + if self.privateLink: + INDEX_URL = self.userDict.get("index_url", "") or "" + elif config_dict["INDEX_URL"]: + INDEX_URL = config_dict["INDEX_URL"] + if INDEX_URL: + share_url = f"{INDEX_URL}findpath?id={dir_id}" + buttons.url("Index Link", share_url) + if mime_type.startswith(("image", "video", "audio")): + share_urls = f"{INDEX_URL}findpath?id={dir_id}&view=true" + buttons.url("View Link", share_urls) + button = buttons.menu(2) + else: + msg += f"\n\nPath:{rclonePath}
" + button = None + msg += f"\n\nBy: {self.tag}\nUid: {self.userId}" + await send_message(self.userId, msg, button) + if config_dict["LOG_CHAT"]: + await send_message(config_dict["LOG_CHAT"], msg, button) + await send_message(self.message, done_msg) + if self.seed: + if self.newDir: + await clean_target(self.newDir) + async with queue_dict_lock: + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + await start_from_queued() + return + await clean_download(self.dir) + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + async with queue_dict_lock: + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + + async def onDownloadError(self, error, button=None): + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + self.rm_from_sm_dir() + msg = f"{self.tag} Download: {escape(error)}" + await send_message(self.message, msg, button) + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + async with queue_dict_lock: + if self.mid in queued_dl: + queued_dl[self.mid].set() + del queued_dl[self.mid] + if self.mid in queued_up: + queued_up[self.mid].set() + del queued_up[self.mid] + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + await sleep(3) + await clean_download(self.dir) + if self.newDir: + await clean_download(self.newDir) + if self.thumb and await aiopath.exists(self.thumb): + await remove(self.thumb) + + async def onUploadError(self, error): + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + await send_message(self.message, f"{self.tag} {escape(error)}") + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + async with queue_dict_lock: + if self.mid in queued_dl: + queued_dl[self.mid].set() + del queued_dl[self.mid] + if self.mid in queued_up: + queued_up[self.mid].set() + del queued_up[self.mid] + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + await sleep(3) + await clean_download(self.dir) + if self.newDir: + await clean_download(self.newDir) + if self.thumb and await aiopath.exists(self.thumb): + await remove(self.thumb) diff --git a/bot/helper/listeners/tasks_listener.py b/bot/helper/listeners/tasks_listener.py deleted file mode 100644 index 7ceadfbcb..000000000 --- a/bot/helper/listeners/tasks_listener.py +++ /dev/null @@ -1,677 +0,0 @@ -from os import path as ospath -from os import walk -from html import escape -from time import time -from asyncio import Event, sleep, create_subprocess_exec - -from requests import utils as rutils -from aioshutil import move -from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove -from aiofiles.os import listdir, makedirs -from pyrogram.enums import ChatType - -from bot import ( - LOGGER, - MAX_SPLIT_SIZE, - GLOBAL_EXTENSION_FILTER, - Interval, - aria2, - queued_dl, - queued_up, - config_dict, - download_dict, - non_queued_dl, - non_queued_up, - queue_dict_lock, - download_dict_lock, - status_reply_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - extra_btns, - sync_to_async, - 
diff --git a/bot/helper/listeners/tasks_listener.py b/bot/helper/listeners/tasks_listener.py
deleted file mode 100644
index 7ceadfbcb..000000000
--- a/bot/helper/listeners/tasks_listener.py
+++ /dev/null
@@ -1,677 +0,0 @@
-from os import path as ospath
-from os import walk
-from html import escape
-from time import time
-from asyncio import Event, sleep, create_subprocess_exec
-
-from requests import utils as rutils
-from aioshutil import move
-from aiofiles.os import path as aiopath
-from aiofiles.os import remove as aioremove
-from aiofiles.os import listdir, makedirs
-from pyrogram.enums import ChatType
-
-from bot import (
-    LOGGER,
-    MAX_SPLIT_SIZE,
-    GLOBAL_EXTENSION_FILTER,
-    Interval,
-    aria2,
-    queued_dl,
-    queued_up,
-    config_dict,
-    download_dict,
-    non_queued_dl,
-    non_queued_up,
-    queue_dict_lock,
-    download_dict_lock,
-    status_reply_dict_lock,
-)
-from bot.helper.ext_utils.bot_utils import (
-    extra_btns,
-    sync_to_async,
-    get_readable_time,
-    get_readable_file_size,
-)
-from bot.helper.ext_utils.exceptions import ExtractionArchiveError
-from bot.helper.ext_utils.files_utils import (
-    is_archive,
-    join_files,
-    split_file,
-    clean_target,
-    process_file,
-    get_base_name,
-    get_path_size,
-    clean_download,
-    is_archive_split,
-    is_first_archive_split,
-)
-from bot.helper.ext_utils.task_manager import start_from_queued
-from bot.helper.telegram_helper.button_build import ButtonMaker
-from bot.helper.telegram_helper.message_utils import (
-    delete_links,
-    edit_message,
-    send_message,
-    sendCustomMsg,
-    delete_message,
-    five_minute_del,
-    sendMultiMessage,
-    delete_all_messages,
-    update_all_messages,
-)
-from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper
-from bot.helper.mirror_leech_utils.status_utils.zip_status import ZipStatus
-from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper
-from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus
-from bot.helper.mirror_leech_utils.status_utils.split_status import SplitStatus
-from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus
-from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus
-from bot.helper.mirror_leech_utils.status_utils.extract_status import ExtractStatus
-from bot.helper.mirror_leech_utils.upload_utils.telegramEngine import TgUploader
-from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus
-
-
-class MirrorLeechListener:
-    def __init__(
-        self,
-        message,
-        compress=False,
-        extract=False,
-        is_qbit=False,
-        is_leech=False,
-        tag=None,
-        select=False,
-        seed=False,
-        same_dir=None,
-        rc_flags=None,
-        upPath=None,
-        is_clone=False,
-        join=False,
-        is_ytdlp=False,
-        drive_id=None,
-        index_link=None,
-        attachment=None,
-        files_utils={},
-    ):
-        if same_dir is None:
-            same_dir = {}
-        self.message = message
-        self.uid = message.id
-        self.extract = extract
-        self.compress = compress
-        self.is_qbit = is_qbit
-        self.is_leech = is_leech
-        self.is_clone = is_clone
-        self.is_ytdlp = is_ytdlp
-        self.tag = tag
-        self.seed = seed
-        self.newDir = ""
-        self.dir = f"/usr/src/app/downloads/{self.uid}"
-        self.select = select
-        self.isSuperGroup = message.chat.type in [
-            ChatType.SUPERGROUP,
-            ChatType.CHANNEL,
-        ]
-        self.isPrivate = message.chat.type == ChatType.BOT
-        self.suproc = None
-        self.same_dir = same_dir
-        self.rc_flags = rc_flags
-        self.upPath = upPath
-        self.join = join
-        self.linkslogmsg = None
-        self.botpmmsg = None
-        self.drive_id = drive_id
-        self.index_link = index_link
-        self.files_utils = files_utils
-        self.attachment = attachment
-
-    async def clean(self):
-        try:
-            async with status_reply_dict_lock:
-                if Interval:
-                    Interval[0].cancel()
-                    Interval.clear()
-                await sync_to_async(aria2.purge)
-                await delete_all_messages()
-        except Exception:
-            pass
-
-    async def on_download_start(self):
-        if config_dict["LEECH_LOG_ID"]:
-            msg = "Task Started\n\n"
-            msg += f"• Task by: {self.tag}\n"
-            msg += f"• User ID: {self.message.from_user.id}"
-            self.linkslogmsg = await sendCustomMsg(config_dict["LEECH_LOG_ID"], msg)
-        self.botpmmsg = await sendCustomMsg(
-            self.message.from_user.id, "Task started"
-        )
-
-    async def on_download_complete(self):
-        multi_links = False
-        while True:
-            if self.same_dir:
-                if (
-                    self.same_dir["total"] in [1, 0]
-                    or self.same_dir["total"] > 1
-                    and len(self.same_dir["tasks"]) > 1
-                ):
-                    break
-            else:
-                break
-            await sleep(0.2)
-        async with download_dict_lock:
-            if self.same_dir and self.same_dir["total"] > 1:
-                self.same_dir["tasks"].remove(self.uid)
-                self.same_dir["total"] -= 1
-                folder_name = self.same_dir["name"]
-                spath = f"{self.dir}/{folder_name}"
-                des_path = f"/usr/src/app/downloads/{next(iter(self.same_dir['tasks']))}/{folder_name}"
-                await makedirs(des_path, exist_ok=True)
-                for item in await listdir(spath):
-                    if item.endswith((".aria2", ".!qB")):
-                        continue
-                    item_path = f"{self.dir}/{folder_name}/{item}"
-                    if item in await listdir(des_path):
-                        await move(item_path, f"{des_path}/{self.uid}-{item}")
-                    else:
-                        await move(item_path, f"{des_path}/{item}")
-                multi_links = True
-            download = download_dict[self.uid]
-            name = str(download.name()).replace("/", "")
-            gid = download.gid()
-        LOGGER.info(f"Download completed: {name}")
-        if multi_links:
-            await self.onUploadError(
-                "Downloaded! Starting other part of the Task..."
-            )
-            return
-        if (
-            name == "None"
-            or self.is_qbit
-            or not await aiopath.exists(f"{self.dir}/{name}")
-        ):
-            try:
-                files = await listdir(self.dir)
-            except Exception as e:
-                await self.onUploadError(str(e))
-                return
-            name = files[-1]
-            if name == "yt-dlp-thumb":
-                name = files[0]
-
-        dl_path = f"{self.dir}/{name}"
-        up_path = ""
-        size = await get_path_size(dl_path)
-        async with queue_dict_lock:
-            if self.uid in non_queued_dl:
-                non_queued_dl.remove(self.uid)
-        await start_from_queued()
-
-        if self.join and await aiopath.isdir(dl_path):
-            await join_files(dl_path)
-
-        if self.extract:
-            pswd = self.extract if isinstance(self.extract, str) else ""
-            try:
-                if await aiopath.isfile(dl_path):
-                    up_path = get_base_name(dl_path)
-                LOGGER.info(f"Extracting: {name}")
-                async with download_dict_lock:
-                    download_dict[self.uid] = ExtractStatus(name, size, gid, self)
-                if await aiopath.isdir(dl_path):
-                    if self.seed:
-                        self.newDir = f"{self.dir}10000"
-                        up_path = f"{self.newDir}/{name}"
-                    else:
-                        up_path = dl_path
-                    for dirpath, _, files in await sync_to_async(
-                        walk, dl_path, topdown=False
-                    ):
-                        for file_ in files:
-                            if (
-                                is_first_archive_split(file_)
-                                or is_archive(file_)
-                                and not file_.endswith(".rar")
-                            ):
-                                f_path = ospath.join(dirpath, file_)
-                                t_path = (
-                                    dirpath.replace(self.dir, self.newDir)
-                                    if self.seed
-                                    else dirpath
-                                )
-                                cmd = [
-                                    "7z",
-                                    "x",
-                                    f"-p{pswd}",
-                                    f_path,
-                                    f"-o{t_path}",
-                                    "-aot",
-                                    "-xr!@PaxHeader",
-                                ]
-                                if not pswd:
-                                    del cmd[2]
-                                if (
-                                    self.suproc == "cancelled"
-                                    or self.suproc is not None
-                                    and self.suproc.returncode == -9
-                                ):
-                                    return
-                                self.suproc = await create_subprocess_exec(*cmd)
-                                code = await self.suproc.wait()
-                                if code == -9:
-                                    return
-                                if code != 0:
-                                    LOGGER.error("Unable to extract archive splits!")
-                        if (
-                            not self.seed
-                            and self.suproc is not None
-                            and self.suproc.returncode == 0
-                        ):
-                            for file_ in files:
-                                if is_archive_split(file_) or is_archive(file_):
-                                    del_path = ospath.join(dirpath, file_)
-                                    try:
-                                        await aioremove(del_path)
-                                    except Exception:
-                                        return
-                else:
-                    if self.seed:
-                        self.newDir = f"{self.dir}10000"
-                        up_path = up_path.replace(self.dir, self.newDir)
-                    cmd = [
-                        "7z",
-                        "x",
-                        f"-p{pswd}",
-                        dl_path,
-                        f"-o{up_path}",
-                        "-aot",
-                        "-xr!@PaxHeader",
-                    ]
-                    if not pswd:
-                        del cmd[2]
-                    if self.suproc == "cancelled":
-                        return
-                    self.suproc = await create_subprocess_exec(*cmd)
-                    code = await self.suproc.wait()
-                    if code == -9:
-                        return
-                    if code == 0:
-                        LOGGER.info(f"Extracted Path: {up_path}")
-                        if not self.seed:
-                            try:
-                                await aioremove(dl_path)
-                            except Exception:
-                                return
-                    else:
-                        LOGGER.error("Unable to extract archive! Uploading anyway")
-                        self.newDir = ""
-                        up_path = dl_path
-            except ExtractionArchiveError:
-                LOGGER.info("Not any valid archive, uploading file as it is.")
-                self.newDir = ""
-                up_path = dl_path
-
-        if self.compress:
-            pswd = self.compress if isinstance(self.compress, str) else ""
-            if up_path:
-                dl_path = up_path
-                up_path = f"{up_path}.zip"
-            elif self.seed and self.is_leech:
-                self.newDir = f"{self.dir}10000"
-                up_path = f"{self.newDir}/{name}.zip"
-            else:
-                up_path = f"{dl_path}.zip"
-            async with download_dict_lock:
-                download_dict[self.uid] = ZipStatus(name, size, gid, self)
-            LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE
-            cmd = [
-                "7z",
-                f"-v{LEECH_SPLIT_SIZE}b",
-                "a",
-                "-mx=0",
-                f"-p{pswd}",
-                up_path,
-                dl_path,
-            ]
-            for ext in GLOBAL_EXTENSION_FILTER:
-                ex_ext = f"-xr!*.{ext}"
-                cmd.append(ex_ext)
-            if self.is_leech and int(size) > LEECH_SPLIT_SIZE:
-                if not pswd:
-                    del cmd[4]
-                LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}.0*")
-            else:
-                del cmd[1]
-                if not pswd:
-                    del cmd[3]
-                LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}")
-            if self.suproc == "cancelled":
-                return
-            self.suproc = await create_subprocess_exec(*cmd)
-            code = await self.suproc.wait()
-            if code == -9:
-                return
-            if not self.seed:
-                await clean_target(dl_path)
-
-        if not self.compress and not self.extract:
-            up_path = dl_path
-
-        up_dir, up_name = up_path.rsplit("/", 1)
-        size = await get_path_size(up_dir)
-        if self.is_leech:
-            m_size = []
-            o_files = []
-            if not self.compress:
-                checked = False
-                LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE
-                for dirpath, _, files in await sync_to_async(
-                    walk, up_dir, topdown=False
-                ):
-                    for file_ in files:
-                        f_path = ospath.join(dirpath, file_)
-                        f_size = await aiopath.getsize(f_path)
-                        if f_size > LEECH_SPLIT_SIZE:
-                            if not checked:
-                                checked = True
-                                async with download_dict_lock:
-                                    download_dict[self.uid] = SplitStatus(
-                                        up_name, size, gid, self
-                                    )
-                                LOGGER.info(f"Splitting: {up_name}")
-                            res = await split_file(
-                                f_path,
-                                f_size,
-                                file_,
-                                dirpath,
-                                LEECH_SPLIT_SIZE,
-                                self,
-                            )
-                            if not res:
-                                return
-                            if res == "errored":
-                                if f_size <= MAX_SPLIT_SIZE:
-                                    continue
-                                try:
-                                    await aioremove(f_path)
-                                except Exception:
-                                    return
-                            elif not self.seed or self.newDir:
-                                try:
-                                    await aioremove(f_path)
-                                except Exception:
-                                    return
-                            else:
-                                m_size.append(f_size)
-                                o_files.append(file_)
-
-        up_limit = config_dict["QUEUE_UPLOAD"]
-        all_limit = config_dict["QUEUE_ALL"]
-        added_to_queue = False
-        async with queue_dict_lock:
-            dl = len(non_queued_dl)
-            up = len(non_queued_up)
-            if (
-                all_limit
-                and dl + up >= all_limit
-                and (not up_limit or up >= up_limit)
-            ) or (up_limit and up >= up_limit):
-                added_to_queue = True
-                LOGGER.info(f"Added to Queue/Upload: {name}")
-                event = Event()
-                queued_up[self.uid] = event
-        if added_to_queue:
-            async with download_dict_lock:
-                download_dict[self.uid] = QueueStatus(name, size, gid, self, "Up")
-            await event.wait()
-            async with download_dict_lock:
-                if self.uid not in download_dict:
-                    return
-            LOGGER.info(f"Start from Queued/Upload: {name}")
-        async with queue_dict_lock:
-            non_queued_up.add(self.uid)
-        if self.is_leech:
-            size = await get_path_size(up_dir)
-            for s in m_size:
-                size = size - s
-            LOGGER.info(f"Leech Name: {up_name}")
-            tg = TgUploader(up_name, up_dir, self)
-            tg_upload_status = TelegramStatus(tg, size, self.message, gid, "up")
-            async with download_dict_lock:
-                download_dict[self.uid] = tg_upload_status
-            await update_all_messages()
-            await tg.upload(o_files, m_size, size)
-        elif self.upPath == "gd":
-            size = await get_path_size(up_path)
-            LOGGER.info(f"Upload Name: {up_name}")
-            drive = GoogleDriveHelper(up_name, up_dir, self)
-            upload_status = GdriveStatus(drive, size, self.message, gid, "up")
-            async with download_dict_lock:
-                download_dict[self.uid] = upload_status
-            await update_all_messages()
-            await sync_to_async(drive.upload, up_name, size, self.drive_id)
-        else:
-            size = await get_path_size(up_path)
-            LOGGER.info(f"Upload Name: {up_name} via RClone")
-            RCTransfer = RcloneTransferHelper(self, up_name)
-            async with download_dict_lock:
-                download_dict[self.uid] = RcloneStatus(
-                    RCTransfer, self.message, gid, "up"
-                )
-            await update_all_messages()
-            await RCTransfer.upload(up_path, size)
-
-    async def onUploadComplete(
-        self, link, size, files, folders, mime_type, name, rclonePath=""
-    ):
-        user_id = self.message.from_user.id
-        name, _ = await process_file(name, user_id, is_mirror=not self.is_leech)
-        msg = f"{escape(name)}\n\n"
-        msg += f"• Size: {get_readable_file_size(size)}\n"
-        msg += f"• Elapsed: {get_readable_time(time() - self.message.date.timestamp())}\n"
-        LOGGER.info(f"Task Done: {name}")
-        buttons = ButtonMaker()
-        inboxButton = ButtonMaker()
-        inboxButton.callback("View in inbox", f"aeon {user_id} private", "header")
-        inboxButton = extra_btns(inboxButton)
-        if self.is_leech:
-            if folders > 1:
-                msg += f"• Total files: {folders}\n"
-            if mime_type != 0:
-                msg += f"• Corrupted files: {mime_type}\n"
-            msg += f"• User ID: {user_id}\n\n"
-            if not files:
-                if self.isPrivate:
-                    msg += (
-                        "Files have not been sent for an unspecified reason"
-                    )
-                await send_message(self.message, msg)
-            else:
-                attachmsg = True
-                fmsg, totalmsg = "\n\n", ""
-                lmsg = "Files have been sent. Access them via the provided links."
-                for index, (dlink, name) in enumerate(files.items(), start=1):
-                    fmsg += f"{index}. {name}\n"
-                    totalmsg = (msg + lmsg + fmsg) if attachmsg else fmsg
-                    if len(totalmsg.encode()) > 3900:
-                        if self.linkslogmsg:
-                            await edit_message(self.linkslogmsg, totalmsg)
-                            await send_message(self.botpmmsg, totalmsg)
-                            self.linkslogmsg = await send_message(
-                                self.linkslogmsg, "Fetching Details..."
-                            )
-                        attachmsg = False
-                        await sleep(1)
-                        fmsg = "\n\n"
-                if fmsg != "\n\n" and self.linkslogmsg:
-                    await send_message(self.linkslogmsg, msg + lmsg + fmsg)
-                    await delete_message(self.linkslogmsg)
-                await send_message(self.botpmmsg, msg + lmsg + fmsg)
-                await delete_message(self.botpmmsg)
-                if self.isSuperGroup:
-                    await send_message(
-                        self.message,
-                        f"{msg}Files have been sent to your inbox",
-                        inboxButton.column(1),
-                    )
-                else:
-                    await delete_message(self.botpmmsg)
-            if self.seed:
-                if self.newDir:
-                    await clean_target(self.newDir)
-                async with queue_dict_lock:
-                    if self.uid in non_queued_up:
-                        non_queued_up.remove(self.uid)
-                await start_from_queued()
-                return
-        else:
-            if mime_type == "Folder":
-                msg += f"• Total files: {files}\n"
-            if link:
-                buttons.url("Cloud link", link)
-                INDEX_URL = (
-                    self.index_link if self.drive_id else config_dict["INDEX_URL"]
-                )
-                if not rclonePath and INDEX_URL:
-                    url_path = rutils.quote(f"{name}")
-                    share_url = f"{INDEX_URL}/{url_path}"
-                    if mime_type == "Folder":
-                        share_url += "/"
-                    buttons.url("Index link", share_url)
-                buttons = extra_btns(buttons)
-                button = buttons.column(2)
-            elif rclonePath:
-                msg += f"• Path: {rclonePath}\n"
-                button = None
-                buttons = extra_btns(buttons)
-                button = buttons.column(2)
-            msg += f"• User ID: {self.message.from_user.id}\n"
-            msg += f"• By: {self.tag}\n"
-
-    async def onDownloadError(self, error, button=None):
-        async with download_dict_lock:
-            if self.uid in download_dict:
-                del download_dict[self.uid]
-            count = len(download_dict)
-        msg = f"Hey, {self.tag}!\n"
-        msg += "Your download has been stopped!\n\n"
-        msg += f"Reason: {escape(error)}\n"
-        msg += f"Elapsed: {get_readable_time(time() - self.message.date.timestamp())}"
-        x = await send_message(self.message, msg, button)
-        await delete_links(self.message)
-        if self.botpmmsg:
-            await delete_message(self.botpmmsg)
-        if self.linkslogmsg:
-            await delete_message(self.linkslogmsg)
-        if count == 0:
-            await self.clean()
-        else:
-            await update_all_messages()
-        if self.isSuperGroup and self.botpmmsg:
-            await send_message(self.botpmmsg, msg, button)
-        await five_minute_del(x)
-
-        async with queue_dict_lock:
-            if self.uid in queued_dl:
-                queued_dl[self.uid].set()
-                del queued_dl[self.uid]
-            if self.uid in queued_up:
-                queued_up[self.uid].set()
-                del queued_up[self.uid]
-            if self.uid in non_queued_dl:
-                non_queued_dl.remove(self.uid)
-            if self.uid in non_queued_up:
-                non_queued_up.remove(self.uid)
-
-        await start_from_queued()
-        await sleep(3)
-        await clean_download(self.dir)
-        if self.newDir:
-            await clean_download(self.newDir)
-
-    async def onUploadError(self, error):
-        async with download_dict_lock:
-            if self.uid in download_dict:
-                del download_dict[self.uid]
-            count = len(download_dict)
-        msg = f"Hey, {self.tag}!\n"
-        msg += "Your upload has been stopped!\n\n"
-        msg += f"Reason: {escape(error)}\n"
-        msg += f"Elapsed: {get_readable_time(time() - self.message.date.timestamp())}"
-        x = await send_message(self.message, msg)
-        if self.linkslogmsg:
-            await delete_message(self.linkslogmsg)
-        await delete_links(self.message)
-        if self.botpmmsg:
-            await delete_message(self.botpmmsg)
-        if count == 0:
-            await self.clean()
-        else:
-            await update_all_messages()
-        if self.isSuperGroup and self.botpmmsg:
-            await send_message(self.botpmmsg, msg)
-        await five_minute_del(x)
-
-        async with queue_dict_lock:
-            if self.uid in queued_dl:
-                queued_dl[self.uid].set()
-                del queued_dl[self.uid]
-            if self.uid in queued_up:
-                queued_up[self.uid].set()
-                del queued_up[self.uid]
-            if self.uid in non_queued_dl:
-                non_queued_dl.remove(self.uid)
-            if self.uid in non_queued_up:
-                non_queued_up.remove(self.uid)
-
-        await start_from_queued()
-        await sleep(3)
-        await clean_download(self.dir)
-        if self.newDir:
-            await clean_download(self.newDir)
diff --git a/bot/helper/mirror_leech_utils/__init__.py b/bot/helper/mirror_leech_utils/__init__.py
index 8b1378917..e69de29bb 100644
--- a/bot/helper/mirror_leech_utils/__init__.py
+++ b/bot/helper/mirror_leech_utils/__init__.py
@@ -1 +0,0 @@
-
diff --git a/bot/helper/mirror_leech_utils/download_utils/aria2_download.py b/bot/helper/mirror_leech_utils/download_utils/aria2_download.py
index b6236b559..13c973f7c 100644
--- a/bot/helper/mirror_leech_utils/download_utils/aria2_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/aria2_download.py
@@ -1,97 +1,96 @@
 from aiofiles.os import path as aiopath
-from aiofiles.os import remove as aioremove
+from aiofiles.os import remove
 
 from bot import (
     LOGGER,
+    TORRENT_TIMEOUT,
     aria2,
+    task_dict,
     config_dict,
     aria2_options,
     aria2c_global,
-    download_dict,
     non_queued_dl,
+    task_dict_lock,
     queue_dict_lock,
-    download_dict_lock,
 )
 from bot.helper.ext_utils.bot_utils import sync_to_async, bt_selection_buttons
-from bot.helper.ext_utils.task_manager import is_queued
+from bot.helper.ext_utils.task_manager import check_running_tasks
 from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage
 from bot.helper.mirror_leech_utils.status_utils.aria2_status import Aria2Status
 
 
-async def add_aria2c_download(
-    link, path, listener, filename, header, ratio, seed_time
-):
+async def add_aria2c_download(listener, dpath, header, ratio, seed_time):
     a2c_opt = {**aria2_options}
     [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
-    a2c_opt["dir"] = path
-    if filename:
-        a2c_opt["out"] = filename
+    a2c_opt["dir"] = dpath
+    if listener.name:
+        a2c_opt["out"] = listener.name
     if header:
         a2c_opt["header"] = header
     if ratio:
         a2c_opt["seed-ratio"] = ratio
     if seed_time:
         a2c_opt["seed-time"] = seed_time
-    if TORRENT_TIMEOUT := config_dict["TORRENT_TIMEOUT"]:
-        a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}"
-    added_to_queue, event = await is_queued(listener.uid)
-    if added_to_queue:
-        if link.startswith("magnet:"):
+    a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}"
+
+    add_to_queue, event = await check_running_tasks(listener)
+    if add_to_queue:
+        if listener.link.startswith("magnet:"):
             a2c_opt["pause-metadata"] = "true"
         else:
             a2c_opt["pause"] = "true"
+
     try:
-        download = (await sync_to_async(aria2.add, link, a2c_opt))[0]
+        download = (await sync_to_async(aria2.add, listener.link, a2c_opt))[0]
     except Exception as e:
         LOGGER.info(f"Aria2c Download Error: {e}")
-        await send_message(listener.message, f"{e}")
+        await listener.onDownloadError(f"{e}")
         return
-    if await aiopath.exists(link):
-        await aioremove(link)
+    if await aiopath.exists(listener.link):
+        await remove(listener.link)
     if download.error_message:
         error = str(download.error_message).replace("<", " ").replace(">", " ")
         LOGGER.info(f"Aria2c Download Error: {error}")
-        await send_message(listener.message, error)
+        await listener.onDownloadError(error)
         return
     gid = download.gid
     name = download.name
-    async with download_dict_lock:
-        download_dict[listener.uid] = Aria2Status(
-            gid, listener, queued=added_to_queue
-        )
-    if added_to_queue:
+    async with task_dict_lock:
+        task_dict[listener.mid] = Aria2Status(listener, gid, queued=add_to_queue)
+    if add_to_queue:
         LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}")
-        if not listener.select or not download.is_torrent:
+        if (not listener.select or not download.is_torrent) and listener.multi <= 1:
             await sendStatusMessage(listener.message)
     else:
-        async with queue_dict_lock:
-            non_queued_dl.add(listener.uid)
         LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}")
     await listener.on_download_start()
-    if not added_to_queue and (not listener.select or not config_dict["BASE_URL"]):
+    if (
+        not add_to_queue
+        and (not listener.select or not config_dict["BASE_URL"])
+        and listener.multi <= 1
+    ):
         await sendStatusMessage(listener.message)
     elif listener.select and download.is_torrent and not download.is_metadata:
-        if not added_to_queue:
+        if not add_to_queue:
            await sync_to_async(aria2.client.force_pause, gid)
-        s_buttons = bt_selection_buttons(gid)
+        SBUTTONS = bt_selection_buttons(gid)
        msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
-        await send_message(listener.message, msg, s_buttons)
+        await send_message(listener.message, msg, SBUTTONS)
 
-    if added_to_queue:
+    if add_to_queue:
         await event.wait()
-
-    async with download_dict_lock:
-        if listener.uid not in download_dict:
-            return
-        download = download_dict[listener.uid]
-        download.queued = False
-        new_gid = download.gid()
+        if listener.isCancelled:
+            return
+        async with queue_dict_lock:
+            non_queued_dl.add(listener.mid)
+        async with task_dict_lock:
+            task = task_dict[listener.mid]
+            task.queued = False
+            await sync_to_async(task.update)
+            new_gid = task.gid()
         await sync_to_async(aria2.client.unpause, new_gid)
         LOGGER.info(f"Start Queued Download from Aria2c: {name}. Gid: {gid}")
-
-    async with queue_dict_lock:
-        non_queued_dl.add(listener.uid)
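
For context, the queued aria2 path above adds the task paused and unpauses it
by gid once the queue event fires. A standalone sketch of that aria2p round
trip — an illustration only; the host, secret, directory, and URL are
placeholders:

    from aria2p import API, Client

    aria2 = API(Client(host="http://localhost", port=6800, secret=""))

    # Queued entries start paused, mirroring the a2c_opt["pause"] branch above.
    opts = {"dir": "/usr/src/app/downloads/1", "pause": "true"}
    download = aria2.add("https://example.com/file.iso", opts)[0]

    # ... later, when a slot frees up, the listener resumes by gid:
    aria2.client.unpause(download.gid)
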
diff --git a/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py b/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py
index 5c1649e7c..9eb1c366c 100644
--- a/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py
+++ b/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py
@@ -2,72 +2,54 @@
 
 from bot import (
     LOGGER,
+    task_dict,
     aria2_options,
     aria2c_global,
-    download_dict,
     non_queued_dl,
+    task_dict_lock,
     queue_dict_lock,
-    download_dict_lock,
 )
 from bot.helper.ext_utils.bot_utils import sync_to_async
-from bot.helper.aeon_utils.nsfw_check import is_nsfw_data
 from bot.helper.ext_utils.task_manager import (
-    is_queued,
-    limit_checker,
+    check_running_tasks,
     stop_duplicate_check,
 )
 from bot.helper.listeners.direct_listener import DirectListener
-from bot.helper.telegram_helper.message_utils import (
-    delete_links,
-    send_message,
-    one_minute_del,
-    sendStatusMessage,
-)
+from bot.helper.telegram_helper.message_utils import sendStatusMessage
 from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus
 from bot.helper.mirror_leech_utils.status_utils.direct_status import DirectStatus
 
 
-async def add_direct_download(details, path, listener, foldername):
+async def add_direct_download(listener, path):
+    details = listener.link
     if not (contents := details.get("contents")):
-        await send_message(listener.message, "There is nothing to download!")
-        return
-    size = details["total_size"]
-    if not foldername:
-        foldername = details["title"]
-    if is_nsfw_data(details):
-        await listener.onDownloadError("NSFW detected")
+        await listener.onDownloadError("There is nothing to download!")
         return
-    path = f"{path}/{foldername}"
-    msg, button = await stop_duplicate_check(foldername, listener)
+    listener.size = details["total_size"]
+
+    if not listener.name:
+        listener.name = details["title"]
+    path = f"{path}/{listener.name}"
+
+    msg, button = await stop_duplicate_check(listener)
     if msg:
-        msg = await send_message(listener.message, msg, button)
-        await delete_links(listener.message)
-        await one_minute_del(msg)
-        return
-    if limit_exceeded := await limit_checker(size, listener):
-        LOGGER.info(f"Limit Exceeded: {foldername} | {size}")
-        msg = await send_message(listener.message, limit_exceeded)
-        await delete_links(listener.message)
-        await one_minute_del(msg)
+        await listener.onDownloadError(msg, button)
         return
 
     gid = token_hex(4)
-    added_to_queue, event = await is_queued(listener.uid)
-    if added_to_queue:
-        LOGGER.info(f"Added to Queue/Download: {foldername}")
-        async with download_dict_lock:
-            download_dict[listener.uid] = QueueStatus(
-                foldername, size, gid, listener, "dl"
-            )
+    add_to_queue, event = await check_running_tasks(listener)
+    if add_to_queue:
+        LOGGER.info(f"Added to Queue/Download: {listener.name}")
+        async with task_dict_lock:
+            task_dict[listener.mid] = QueueStatus(listener, gid, "dl")
         await listener.on_download_start()
-        await sendStatusMessage(listener.message)
+        if listener.multi <= 1:
+            await sendStatusMessage(listener.message)
         await event.wait()
-        async with download_dict_lock:
-            if listener.uid not in download_dict:
-                return
-        from_queue = True
-    else:
-        from_queue = False
+        if listener.isCancelled:
+            return
+        async with queue_dict_lock:
+            non_queued_dl.add(listener.mid)
 
     a2c_opt = {**aria2_options}
     [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options]
@@ -75,19 +57,17 @@ async def add_direct_download(details, path, listener, foldername):
         a2c_opt["header"] = header
     a2c_opt["follow-torrent"] = "false"
     a2c_opt["follow-metalink"] = "false"
-    directListener = DirectListener(foldername, size, path, listener, a2c_opt)
-    async with download_dict_lock:
-        download_dict[listener.uid] = DirectStatus(directListener, gid, listener)
+    directListener = DirectListener(path, listener, a2c_opt)
 
-    async with queue_dict_lock:
-        non_queued_dl.add(listener.uid)
+    async with task_dict_lock:
+        task_dict[listener.mid] = DirectStatus(listener, directListener, gid)
 
-    if from_queue:
-        LOGGER.info(f"Start Queued Download from Direct Download: {foldername}")
+    if add_to_queue:
+        LOGGER.info(f"Start Queued Download from Direct Download: {listener.name}")
     else:
-        LOGGER.info(f"Download from Direct Download: {foldername}")
+        LOGGER.info(f"Download from Direct Download: {listener.name}")
         await listener.on_download_start()
-        await sendStatusMessage(listener.message)
+        if listener.multi <= 1:
+            await sendStatusMessage(listener.message)
 
-    await delete_links(listener.message)
     await sync_to_async(directListener.download, contents)
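
Note that add_direct_download() now assumes listener.link already holds a
resolved manifest rather than a URL. The keys read above are contents,
total_size, and title (plus an optional header set in context not shown here);
a representative value — invented data, with per-file keys matching what the
folder scrapers in direct_link_generator.py produce:

    details = {
        "title": "Sample Folder",
        "total_size": 1073741824,  # bytes, summed over all files
        "header": "Cookie: accountToken=abc123",  # optional, forwarded to aria2c
        "contents": [
            {"url": "https://cdn.example.com/a.bin", "filename": "a.bin", "path": "Sample Folder"},
            {"url": "https://cdn.example.com/sub/b.bin", "filename": "b.bin", "path": "Sample Folder/sub"},
        ],
    }
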
"hxfile": ["hxfile.co"], - "onedrive": ["1drv.ms"], - "pixeldrain": ["pixeldrain.com"], - "racaty": ["racaty"], - "fichier": ["1fichier.com"], - "solidfiles": ["solidfiles.com"], - "krakenfiles": ["krakenfiles.com"], - "uploadee": ["upload.ee"], - "gofile": ["gofile.io"], - "send_cm": ["send.cm"], - "easyupload": ["easyupload.io"], - "hubdrive": ["hubdrive"], - "streamvid": ["streamvid.net"], - "shrdsk": ["shrdsk.me"], - "streamhub": ["streamhub.ink"], - "appflix": ["appdrive", "gdflix"], - "akmfiles": ["akmfiles.com", "akmfls.xyz"], - "doods": [ - "dood.watch", - "doodstream.com", - "dood.to", - "dood.so", - "dood.cx", - "dood.la", - "dood.ws", - "dood.sh", - "doodstream.co", - "dood.pm", - "dood.wf", - "dood.re", - "dood.video", - "dooood.com", - "dood.yt", - "doods.yt", - "dood.stream", - "doods.pro", - "ds2play.com", - ], - "streamtape": [ - "streamtape.com", - "streamtape.co", - "streamtape.cc", - "streamtape.to", - "streamtape.net", - "streamta.pe", - "streamtape.xyz", - ], - "wetransfer": ["wetransfer.com", "we.tl"], - "terabox": [ - "terabox.com", - "nephobox.com", - "4funbox.com", - "mirrobox.com", - "momerybox.com", - "teraboxapp.com", - "1024tera.com", - "terabox.app", - "gibibox.com", - "goaibox.com", - "terasharelink.com", - "teraboxlink.com", - "freeterabox.com", - "1024terabox.com", - "teraboxshare.com", - ], - "filewish": [ - "filelions.co", - "filelions.site", - "filelions.live", - "filelions.lol", - "filelions.to", - "cabecabean.lol", - "filelions.online", - "embedwish.com", - "streamwish.com", - "kitabmarkaz.xyz", - "wishfast.top", - "streamwish.to", - ], - "linkBox": ["linkbox.to", "lbx.to", "telbx.net", "teltobx.net"], - "filepress": ["filepress"], - "pcloud": ["u.pcloud.link"], -} def direct_link_generator(link): domain = urlparse(link).hostname if not domain: - raise DirectDownloadLinkError("ERROR: Invalid URL") + raise DirectDownloadLinkException("ERROR: Invalid URL") if "youtube.com" in domain or "youtu.be" in domain: - raise DirectDownloadLinkError("ERROR: Use ytdl cmds for Youtube links") - for func_name, domain_list in domain_dict.items(): - if any(x in domain for x in domain_list): - func = globals().get(func_name) - return func(link) - raise DirectDownloadLinkError(f"No Direct link function found for {link}") + raise DirectDownloadLinkException("ERROR: Use ytdl cmds for Youtube links") + if "yadi.sk" in link or "disk.yandex." 
in link: + return yandex_disk(link) + if "mediafire.com" in domain: + return mediafire(link) + if "osdn.net" in domain: + return osdn(link) + if "github.com" in domain: + return github(link) + if "hxfile.co" in domain: + return hxfile(link) + if "1drv.ms" in domain: + return onedrive(link) + if "pixeldrain.com" in domain: + return pixeldrain(link) + if "racaty" in domain: + return racaty(link) + if "1fichier.com" in domain: + return fichier(link) + if "solidfiles.com" in domain: + return solidfiles(link) + if "krakenfiles.com" in domain: + return krakenfiles(link) + if "upload.ee" in domain: + return uploadee(link) + if "gofile.io" in domain: + return gofile(link) + if "send.cm" in domain: + return send_cm(link) + if "tmpsend.com" in domain: + return tmpsend(link) + if "easyupload.io" in domain: + return easyupload(link) + if "streamvid.net" in domain: + return streamvid(link) + if "shrdsk.me" in domain: + return shrdsk(link) + if "u.pcloud.link" in domain: + return pcloud(link) + if "qiwi.gg" in domain: + return qiwi(link) + if "mp4upload.com" in domain: + return mp4upload(link) + if "berkasdrive.com" in domain: + return berkasdrive(link) + if any(x in domain for x in ["akmfiles.com", "akmfls.xyz"]): + return akmfiles(link) + if any( + x in domain + for x in [ + "dood.watch", + "doodstream.com", + "dood.to", + "dood.so", + "dood.cx", + "dood.la", + "dood.ws", + "dood.sh", + "doodstream.co", + "dood.pm", + "dood.wf", + "dood.re", + "dood.video", + "dooood.com", + "dood.yt", + "doods.yt", + "dood.stream", + "doods.pro", + "ds2play.com", + "d0o0d.com", + "ds2video.com", + "do0od.com", + "d000d.com", + ] + ): + return doods(link) + if any( + x in domain + for x in [ + "streamtape.com", + "streamtape.co", + "streamtape.cc", + "streamtape.to", + "streamtape.net", + "streamta.pe", + "streamtape.xyz", + ] + ): + return streamtape(link) + if any(x in domain for x in ["wetransfer.com", "we.tl"]): + return wetransfer(link) + if any( + x in domain + for x in [ + "terabox.com", + "nephobox.com", + "4funbox.com", + "mirrobox.com", + "momerybox.com", + "teraboxapp.com", + "1024tera.com", + "terabox.app", + "gibibox.com", + "goaibox.com", + ] + ): + return async_to_sync(terabox, link) + if any( + x in domain + for x in [ + "filelions.co", + "filelions.site", + "filelions.live", + "filelions.to", + "cabecabean.lol", + "filelions.online", + "embedwish.com", + "streamwish.com", + "kitabmarkaz.xyz", + "wishfast.top", + "streamwish.to", + ] + ): + return filelions_and_streamwish(link) + if any(x in domain for x in ["streamhub.ink", "streamhub.to"]): + return streamhub(link) + if any( + x in domain + for x in [ + "linkbox.to", + "lbx.to", + "teltobx.net", + "telbx.net", + ] + ): + return linkBox(link) + if is_share_link(link): + if "gdtot" in domain: + return gdtot(link) + if "filepress" in domain: + return filepress(link) + return sharer_scraper(link) + raise DirectDownloadLinkException(f"No Direct link function found for {link}") def get_captcha_token(session, params): @@ -154,13 +208,15 @@ def mediafire(url, session=None): html = HTML(session.get(url).text) except Exception as e: session.close() - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if error := html.xpath('//p[@class="notranslate"]/text()'): session.close() - raise DirectDownloadLinkError(f"ERROR: {error[0]}") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") if not (final_link := html.xpath("//a[@id='downloadButton']/@href")): session.close() - 
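
A quick usage sketch for the rewritten dispatcher — not code from this patch;
the URL is invented, and the return-shape handling merely summarizes the
helpers that follow in this file:

    from bot.helper.ext_utils.exceptions import DirectDownloadLinkException

    try:
        res = direct_link_generator("https://pixeldrain.com/u/abc123")
    except DirectDownloadLinkException as e:
        print(e)  # every failure path raises with an "ERROR: ..." message
    else:
        if isinstance(res, tuple):   # (url, header), e.g. hxfile or single-file gofile
            url, header = res
        elif isinstance(res, dict):  # folder manifest: contents/title/total_size
            print(res["title"], len(res["contents"]))
        else:                        # plain direct URL string
            url = res
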
raise DirectDownloadLinkError("ERROR: No links found in this page Try Again") + raise DirectDownloadLinkException( + "ERROR: No links found in this page Try Again" + ) if final_link[0].startswith("//"): return mediafire(f"https://{final_link[0][2:]}", session) session.close() @@ -172,87 +228,95 @@ def osdn(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if not (direct_link := html.xapth('//a[@class="mirror_link"]/@href')): - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") return f"https://osdn.net{direct_link[0]}" +def yandex_disk(url: str) -> str: + """Yandex.Disk direct link generator + Based on https://github.com/wldhx/yadisk-direct""" + try: + link = findall(r"\b(https?://(yadi\.sk|disk\.yandex\.(com|ru))\S+)", url)[0][ + 0 + ] + except IndexError: + return "No Yandex.Disk links found\n" + api = "https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}" + try: + return get(api.format(link)).json()["href"] + except KeyError as e: + raise DirectDownloadLinkException( + "ERROR: File not found/Download limit reached" + ) from e + + def github(url): + """GitHub direct links generator""" try: findall(r"\bhttps?://.*github\.com.*releases\S+", url)[0] - except IndexError: - raise DirectDownloadLinkError("No GitHub Releases links found") + except IndexError as e: + raise DirectDownloadLinkException("No GitHub Releases links found") from e with create_scraper() as session: _res = session.get(url, stream=True, allow_redirects=False) if "location" in _res.headers: return _res.headers["location"] - raise DirectDownloadLinkError("ERROR: Can't extract the link") + raise DirectDownloadLinkException("ERROR: Can't extract the link") def hxfile(url): - with create_scraper() as session: + if not ospath.isfile("hxfile.txt"): + raise DirectDownloadLinkException("ERROR: hxfile.txt (cookies) Not Found!") + try: + jar = MozillaCookieJar() + jar.load("hxfile.txt") + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + cookies = {cookie.name: cookie.value for cookie in jar} + with Session() as session: try: file_code = url.split("/")[-1] html = HTML( - session.post(url, data={"op": "download2", "id": file_code}).text + session.post( + url, + data={"op": "download2", "id": file_code}, + cookies=cookies, + ).text ) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if direct_link := html.xpath('//a[@class="btn btn-dow"]/@href'): - return direct_link[0] - raise DirectDownloadLinkError("ERROR: Direct download link not found") - - -def filepress(url): - with create_scraper() as session: - try: - url = session.get(url).url - raw = urlparse(url) - json_data = { - "id": raw.path.split("/")[-1], - "method": "publicDownlaod", - } - api = f"{raw.scheme}://{raw.hostname}/api/file/downlaod/" - res2 = session.post( - api, - headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, - json=json_data, - ).json() - json_data2 = { - "id": res2["data"], - "method": "publicUserDownlaod", - } - api2 = "https://new2.filepress.store/api/file/downlaod2/" - res = session.post( - api2, - headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, - json=json_data2, - ).json() - except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if "data" not in res: - 
raise DirectDownloadLinkError(f'ERROR: {res["statusText"]}') - return f'https://drive.google.com/uc?id={res["data"]}&export=download' + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e + if direct_link := html.xpath("//a[@class='btn btn-dow']/@href"): + header = f"Referer: {url}" + return direct_link[0], header + raise DirectDownloadLinkException("ERROR: Direct download link not found") def onedrive(link): + """Onedrive direct link generator + By https://github.com/junedkh""" with create_scraper() as session: try: link = session.get(link).url parsed_link = urlparse(link) link_data = parse_qs(parsed_link.query) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if not link_data: - raise DirectDownloadLinkError("ERROR: Unable to find link_data") + raise DirectDownloadLinkException("ERROR: Unable to find link_data") folder_id = link_data.get("resid") if not folder_id: - raise DirectDownloadLinkError("ERROR: folder id not found") + raise DirectDownloadLinkException("ERROR: folder id not found") folder_id = folder_id[0] authkey = link_data.get("authkey") if not authkey: - raise DirectDownloadLinkError("ERROR: authkey not found") + raise DirectDownloadLinkException("ERROR: authkey not found") authkey = authkey[0] boundary = uuid4() headers = {"content-type": f"multipart/form-data;boundary={boundary}"} @@ -264,13 +328,16 @@ def onedrive(link): data=data, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if "@content.downloadUrl" not in resp: - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") return resp["@content.downloadUrl"] def pixeldrain(url): + """Based on https://github.com/yash-dk/TorToolkit-Telegram""" url = url.strip("/ ") file_id = url.split("/")[-1] if url.split("/")[-2] == "l": @@ -283,10 +350,14 @@ def pixeldrain(url): try: resp = session.get(info_link).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if resp["success"]: return dl_link - raise DirectDownloadLinkError(f"ERROR: Cant't download due {resp['message']}.") + raise DirectDownloadLinkException( + f"ERROR: Cant't download due {resp['message']}." 
+ ) def streamtape(url): @@ -296,11 +367,14 @@ def streamtape(url): with Session() as session: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if not (script := html.xpath("//script[contains(text(),'ideoooolink')]/text()")): - raise DirectDownloadLinkError("ERROR: requeries script not found") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + script = html.xpath( + "//script[contains(text(),'ideoooolink')]/text()" + ) or html.xpath("//script[contains(text(),'ideoolink')]/text()") + if not script: + raise DirectDownloadLinkException("ERROR: requeries script not found") if not (link := findall(r"(&expires\S+)'", script[0])): - raise DirectDownloadLinkError("ERROR: Download link not found") + raise DirectDownloadLinkException("ERROR: Download link not found") return f"https://streamtape.com/get_video?id={_id}{link[-1]}" @@ -311,17 +385,22 @@ def racaty(url): json_data = {"op": "download2", "id": url.split("/")[-1]} html = HTML(session.post(url, data=json_data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if direct_link := html.xpath("//a[@id='uniqueExpirylink']/@href"): return direct_link[0] - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def fichier(link): + """1Fichier direct link generator + Based on https://github.com/Maujar + """ regex = r"^([http:\/\/|https:\/\/]+)?.*1fichier\.com\/\?.+" gan = match(regex, link) if not gan: - raise DirectDownloadLinkError("ERROR: The link you entered is wrong!") + raise DirectDownloadLinkException("ERROR: The link you entered is wrong!") if "::" in link: pswd = link.split("::")[-1] url = link.split("::")[-2] @@ -336,33 +415,33 @@ def fichier(link): pw = {"pass": pswd} req = cget("post", url, data=pw) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if req.status_code == 404: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: File not found/The link you entered is wrong!" ) html = HTML(req.text) if dl_url := html.xpath('//a[@class="ok btn-general btn-orange"]/@href'): return dl_url[0] if not (ct_warn := html.xpath('//div[@class="ct_warn"]')): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Error trying to generate Direct Link from 1fichier!" ) if len(ct_warn) == 3: str_2 = ct_warn[-1].text if "you must wait" in str_2.lower(): if numbers := [int(word) for word in str_2.split() if word.isdigit()]: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute." ) - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: 1fichier is on a limit. Please wait a few minutes/hour." ) if "protect access" in str_2.lower(): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(link)}" ) - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Failed to generate Direct Link from 1fichier!" 
) if len(ct_warn) == 4: @@ -370,22 +449,25 @@ def fichier(link): str_3 = ct_warn[-1].text if "you must wait" in str_1.lower(): if numbers := [int(word) for word in str_1.split() if word.isdigit()]: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute." ) - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: 1fichier is on a limit. Please wait a few minutes/hour." ) if "bad password" in str_3.lower(): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: The password you entered is wrong!" ) - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Error trying to generate Direct Link from 1fichier!" ) def solidfiles(url): + """Solidfiles direct link generator + Based on https://github.com/Xonshiz/SolidFiles-Downloader + By https://github.com/Jusidama18""" with create_scraper() as session: try: headers = { @@ -397,7 +479,9 @@ def solidfiles(url): ) return loads(mainOptions)["downloadUrl"] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e def krakenfiles(url): @@ -405,24 +489,28 @@ def krakenfiles(url): try: _res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e html = HTML(_res.text) if post_url := html.xpath('//form[@id="dl-form"]/@action'): - post_url = f"https:{post_url[0]}" + post_url = f"https://krakenfiles.com{post_url[0]}" else: - raise DirectDownloadLinkError("ERROR: Unable to find post link.") + raise DirectDownloadLinkException("ERROR: Unable to find post link.") if token := html.xpath('//input[@id="dl-token"]/@value'): data = {"token": token[0]} else: - raise DirectDownloadLinkError("ERROR: Unable to find token for post.") + raise DirectDownloadLinkException( + "ERROR: Unable to find token for post." 
+ ) try: _json = session.post(post_url, data=data).json() except Exception as e: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: {e.__class__.__name__} While send post request" - ) + ) from e if _json["status"] != "ok": - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Unable to find download after post request" ) return _json["url"] @@ -433,84 +521,111 @@ def uploadee(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if link := html.xpath("//a[@id='d_l']/@href"): return link[0] - raise DirectDownloadLinkError("ERROR: Direct Link not found") - - -def terabox(url, video_quality="HD Video", save_dir="HD_Video"): - """Terabox direct link generator - https://github.com/Dawn-India/Z-Mirror""" - - pattern = r"/s/(\w+)|surl=(\w+)" - if not search(pattern, url): - raise DirectDownloadLinkError("ERROR: Invalid terabox URL") - - netloc = urlparse(url).netloc - terabox_url = url.replace(netloc, "1024tera.com") - - urls = [ - "https://ytshorts.savetube.me/api/v1/terabox-downloader", - f"https://teraboxvideodownloader.nepcoderdevs.workers.dev/?url={terabox_url}", - f"https://terabox.udayscriptsx.workers.dev/?url={terabox_url}", - f"https://mavimods.serv00.net/Mavialt.php?url={terabox_url}", - f"https://mavimods.serv00.net/Mavitera.php?url={terabox_url}", - ] + raise DirectDownloadLinkException("ERROR: Direct Link not found") + + +async def terabox(link: str): + async with ( + ClientSession() as session, + session.post( + "https://ytshorts.savetube.me/api/v1/terabox-downloader", + data={"url": f"{link}"}, + headers={ + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", + }, + ) as response, + ): + try: + response.raise_for_status() + json_response = await response.json() + return json_response["response"][0]["resolutions"]["HD Video"] + except Exception: + raise DirectDownloadLinkException("ERROR: Direct Link not found") - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0", - "Accept": "application/json, text/plain, */*", - "Accept-Language": "en-US,en;q=0.5", - "Content-Type": "application/json", - "Origin": "https://ytshorts.savetube.me", - "Alt-Used": "ytshorts.savetube.me", - "Sec-Fetch-Dest": "empty", - "Sec-Fetch-Mode": "cors", - "Sec-Fetch-Site": "same-origin", - } - for base_url in urls: +def filepress(url): + with create_scraper() as session: try: - if "api/v1" in base_url: - response = post(base_url, headers=headers, json={"url": terabox_url}) - else: - response = get(base_url) - - if response.status_code == 200: - break + url = session.get(url).url + raw = urlparse(url) + json_data = { + "id": raw.path.split("/")[-1], + "method": "publicDownlaod", + } + api = f"{raw.scheme}://{raw.hostname}/api/file/downlaod/" + res2 = session.post( + api, + headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, + json=json_data, + ).json() + json_data2 = { + "id": res2["data"], + "method": "publicUserDownlaod", + } + api2 = "https://new2.filepress.store/api/file/downlaod2/" + res = session.post( + api2, + headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, + json=json_data2, + ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") from e - else: - raise DirectDownloadLinkError("ERROR: Unable to fetch the JSON data") + raise DirectDownloadLinkException( + f"ERROR: 
{e.__class__.__name__}" + ) from e + if "data" not in res: + raise DirectDownloadLinkException(f'ERROR: {res["statusText"]}') + return f'https://drive.google.com/uc?id={res["data"]}&export=download' - data = response.json() - details = {"contents": [], "title": "", "total_size": 0} - for item in data["response"]: - title = item["title"] - resolutions = item.get("resolutions", {}) - links = resolutions.get(video_quality) - if links: - details["contents"].append( - { - "url": links, - "filename": title, - "path": path.join(title, save_dir), - } +def gdtot(url): + cget = create_scraper().request + try: + res = cget("GET", f'https://gdtot.pro/file/{url.split("/")[-1]}') + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + token_url = HTML(res.text).xpath( + "//a[contains(@class,'inline-flex items-center justify-center')]/@href" + ) + if not token_url: + try: + url = cget("GET", url).url + p_url = urlparse(url) + res = cget( + "GET", f"{p_url.scheme}://{p_url.hostname}/ddl/{url.split('/')[-1]}" ) - details["title"] = title - - if not details["contents"]: - raise DirectDownloadLinkError("ERROR: No valid download links found") - - if len(details["contents"]) == 1: - return details["contents"][0]["url"] - - return details - - -def appflix(url): + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e + if ( + drive_link := findall(r"myDl\('(.*?)'\)", res.text) + ) and "drive.google.com" in drive_link[0]: + return drive_link[0] + raise DirectDownloadLinkException( + "ERROR: Drive Link not found, Try in your broswer" + ) + token_url = token_url[0] + try: + token_page = cget("GET", token_url) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} with {token_url}" + ) from e + path = findall(r'\("(.*?)"\)', token_page.text) + if not path: + raise DirectDownloadLinkException("ERROR: Cannot bypass this") + path = path[0] + raw = urlparse(token_url) + final_url = f"{raw.scheme}://{raw.hostname}{path}" + return sharer_scraper(final_url) + + +def sharer_scraper(url): cget = create_scraper().request try: url = cget("GET", url).url @@ -520,13 +635,13 @@ def appflix(url): } res = cget("GET", url, headers=header) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e key = findall(r'"key",\s+"(.*?)"', res.text) if not key: - raise DirectDownloadLinkError("ERROR: Key not found!") + raise DirectDownloadLinkException("ERROR: Key not found!") key = key[0] if not HTML(res.text).xpath("//button[@id='drc']"): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: This link don't have direct download button" ) boundary = uuid4() @@ -547,9 +662,9 @@ def appflix(url): "POST", url, cookies=res.cookies, headers=headers, data=data ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if "url" not in res: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Drive Link not found, Try in your broswer" ) if "drive.google.com" in res["url"]: @@ -557,12 +672,14 @@ def appflix(url): try: res = cget("GET", res["url"]) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if ( drive_link := 
HTML(res.text).xpath("//a[contains(@class,'btn')]/@href") ) and "drive.google.com" in drive_link[0]: return drive_link[0] - raise DirectDownloadLinkError("ERROR: Drive Link not found, Try in your broswer") + raise DirectDownloadLinkException( + "ERROR: Drive Link not found, Try in your broswer" + ) def wetransfer(url): @@ -579,14 +696,16 @@ def wetransfer(url): json=json_data, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if "direct_link" in res: return res["direct_link"] if "message" in res: - raise DirectDownloadLinkError(f"ERROR: {res['message']}") + raise DirectDownloadLinkException(f"ERROR: {res['message']}") if "error" in res: - raise DirectDownloadLinkError(f"ERROR: {res['error']}") - raise DirectDownloadLinkError("ERROR: cannot find direct link") + raise DirectDownloadLinkException(f"ERROR: {res['error']}") + raise DirectDownloadLinkException("ERROR: cannot find direct link") def akmfiles(url): @@ -594,26 +713,31 @@ def akmfiles(url): try: html = HTML( session.post( - url, data={"op": "download2", "id": url.split("/")[-1]} + url, + data={"op": "download2", "id": url.split("/")[-1]}, ).text ) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if direct_link := html.xpath("//a[contains(@class,'btn btn-dow')]/@href"): return direct_link[0] - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def shrdsk(url): with create_scraper() as session: try: _json = session.get( - f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}' + f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}', ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if "download_data" not in _json: - raise DirectDownloadLinkError("ERROR: Download data not found") + raise DirectDownloadLinkException("ERROR: Download data not found") try: _res = session.get( f"https://shrdsk.me/download/{_json['download_data']}", @@ -622,8 +746,10 @@ def shrdsk(url): if "Location" in _res.headers: return _res.headers["Location"] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - raise DirectDownloadLinkError("ERROR: cannot find direct link in headers") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e + raise DirectDownloadLinkException("ERROR: cannot find direct link in headers") def linkBox(url: str): @@ -631,25 +757,28 @@ def linkBox(url: str): try: shareToken = parsed_url.path.split("/")[-1] except Exception: - raise DirectDownloadLinkError("ERROR: invalid URL") + raise DirectDownloadLinkException("ERROR: invalid URL") details = {"contents": [], "title": "", "total_size": 0} def __singleItem(session, itemId): try: _json = session.get( - "https://www.linkbox.to/api/file/detail", params={"itemId": itemId} + "https://www.linkbox.to/api/file/detail", + params={"itemId": itemId}, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e data = _json["data"] if not data: if "msg" in _json: - raise 
DirectDownloadLinkError(f"ERROR: {_json['msg']}") - raise DirectDownloadLinkError("ERROR: data not found") + raise DirectDownloadLinkException(f"ERROR: {_json['msg']}") + raise DirectDownloadLinkException("ERROR: data not found") itemInfo = data["itemInfo"] if not itemInfo: - raise DirectDownloadLinkError("ERROR: itemInfo not found") + raise DirectDownloadLinkException("ERROR: itemInfo not found") filename = itemInfo["name"] sub_type = itemInfo.get("sub_type") if sub_type and not filename.endswith(sub_type): @@ -676,17 +805,21 @@ def __fetch_links(session, _id=0, folderPath=""): } try: _json = session.get( - "https://www.linkbox.to/api/file/share_out_list", params=params + "https://www.linkbox.to/api/file/share_out_list", + params=params, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e data = _json["data"] if not data: if "msg" in _json: - raise DirectDownloadLinkError(f"ERROR: {_json['msg']}") - raise DirectDownloadLinkError("ERROR: data not found") - if data["shareType"] == "singleItem": - return __singleItem(session, data["itemId"]) + raise DirectDownloadLinkException(f"ERROR: {_json['msg']}") + raise DirectDownloadLinkException("ERROR: data not found") + with suppress(Exception): + if data["shareType"] == "singleItem": + return __singleItem(session, data["itemId"]) if not details["title"]: details["title"] = data["dirName"] contents = data["list"] @@ -695,9 +828,9 @@ def __fetch_links(session, _id=0, folderPath=""): for content in contents: if content["type"] == "dir" and "url" not in content: if not folderPath: - newFolderPath = path.join(details["title"], content["name"]) + newFolderPath = ospath.join(details["title"], content["name"]) else: - newFolderPath = path.join(folderPath, content["name"]) + newFolderPath = ospath.join(folderPath, content["name"]) if not details["title"]: details["title"] = content["name"] __fetch_links(session, content["id"], newFolderPath) @@ -710,7 +843,7 @@ def __fetch_links(session, _id=0, folderPath=""): ): filename += f".{sub_type}" item = { - "path": path.join(folderPath), + "path": ospath.join(folderPath), "filename": filename, "url": content["url"], } @@ -725,7 +858,7 @@ def __fetch_links(session, _id=0, folderPath=""): try: with Session() as session: __fetch_links(session) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: raise e return details @@ -740,7 +873,7 @@ def gofile(url): _password = "" _id = url.split("/")[-1] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") def __get_token(session): headers = { @@ -753,7 +886,7 @@ def __get_token(session): try: __res = session.post(__url, headers=headers).json() if __res["status"] != "ok": - raise DirectDownloadLinkError("ERROR: Failed to get token.") + raise DirectDownloadLinkException("ERROR: Failed to get token.") return __res["data"]["token"] except Exception as e: raise e @@ -772,17 +905,19 @@ def __fetch_links(session, _id, folderPath=""): try: _json = session.get(_url, headers=headers).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") if _json["status"] in "error-passwordRequired": - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" ) if 
_json["status"] in "error-passwordWrong": - raise DirectDownloadLinkError("ERROR: This password is wrong !") + raise DirectDownloadLinkException("ERROR: This password is wrong !") if _json["status"] in "error-notFound": - raise DirectDownloadLinkError("ERROR: File not found on gofile's server") + raise DirectDownloadLinkException( + "ERROR: File not found on gofile's server" + ) if _json["status"] in "error-notPublic": - raise DirectDownloadLinkError("ERROR: This folder is not public") + raise DirectDownloadLinkException("ERROR: This folder is not public") data = _json["data"] @@ -795,15 +930,15 @@ def __fetch_links(session, _id, folderPath=""): if not content["public"]: continue if not folderPath: - newFolderPath = path.join(details["title"], content["name"]) + newFolderPath = ospath.join(details["title"], content["name"]) else: - newFolderPath = path.join(folderPath, content["name"]) + newFolderPath = ospath.join(folderPath, content["name"]) __fetch_links(session, content["id"], newFolderPath) else: if not folderPath: folderPath = details["title"] item = { - "path": path.join(folderPath), + "path": ospath.join(folderPath), "filename": content["name"], "url": content["link"], } @@ -819,12 +954,12 @@ def __fetch_links(session, _id, folderPath=""): try: token = __get_token(session) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") details["header"] = f"Cookie: accountToken={token}" try: __fetch_links(session, _id) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) if len(details["contents"]) == 1: return (details["contents"][0]["url"], details["header"]) @@ -837,12 +972,12 @@ def mediafireFolder(url): folderkey = raw.split("/", 1)[0] folderkey = folderkey.split(",") except Exception: - raise DirectDownloadLinkError("ERROR: Could not parse ") + raise DirectDownloadLinkException("ERROR: Could not parse ") if len(folderkey) == 1: folderkey = folderkey[0] details = {"contents": [], "title": "", "total_size": 0, "header": ""} - session = req_session() + session = Session() adapter = HTTPAdapter( max_retries=Retry(total=10, read=10, connect=10, backoff_factor=0.3) ) @@ -868,7 +1003,7 @@ def __get_info(folderkey): }, ).json() except Exception as e: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: {e.__class__.__name__} While getting info" ) _res = _json["response"] @@ -877,14 +1012,14 @@ def __get_info(folderkey): elif "folder_info" in _res: folder_infos.append(_res["folder_info"]) elif "message" in _res: - raise DirectDownloadLinkError(f"ERROR: {_res['message']}") + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") else: - raise DirectDownloadLinkError("ERROR: something went wrong!") + raise DirectDownloadLinkException("ERROR: something went wrong!") try: __get_info(folderkey) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) details["title"] = folder_infos[0]["name"] @@ -909,20 +1044,20 @@ def __get_content(folderKey, folderPath="", content_type="folders"): params=params, ).json() except Exception as e: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: {e.__class__.__name__} While getting content" ) _res = _json["response"] if "message" in _res: - raise DirectDownloadLinkError(f"ERROR: {_res['message']}") + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") _folder_content = _res["folder_content"] if content_type 
== "folders": folders = _folder_content["folders"] for folder in folders: if folderPath: - newFolderPath = path.join(folderPath, folder["name"]) + newFolderPath = ospath.join(folderPath, folder["name"]) else: - newFolderPath = path.join(folder["name"]) + newFolderPath = ospath.join(folder["name"]) __get_content(folder["folderkey"], newFolderPath) __get_content(folderKey, folderPath, "files") else: @@ -934,7 +1069,7 @@ def __get_content(folderKey, folderPath="", content_type="folders"): item["filename"] = file["filename"] if not folderPath: folderPath = details["title"] - item["path"] = path.join(folderPath) + item["path"] = ospath.join(folderPath) item["url"] = _url if "size" in file: size = file["size"] @@ -947,7 +1082,7 @@ def __get_content(folderKey, folderPath="", content_type="folders"): for folder in folder_infos: __get_content(folder["folderkey"], folder["name"]) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) finally: session.close() if len(details["contents"]) == 1: @@ -968,7 +1103,7 @@ def cf_bypass(url): return _json["solution"]["response"] except Exception as e: e - raise DirectDownloadLinkError("ERROR: Con't bypass cloudflare") + raise DirectDownloadLinkException("ERROR: Con't bypass cloudflare") def send_cm_file(url, file_id=None): @@ -983,11 +1118,13 @@ def send_cm_file(url, file_id=None): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if html.xpath("//input[@name='password']"): _passwordNeed = True if not (file_id := html.xpath("//input[@name='id']/@value")): - raise DirectDownloadLinkError("ERROR: file_id not found") + raise DirectDownloadLinkException("ERROR: file_id not found") try: data = {"op": "download2", "id": file_id} if _password and _passwordNeed: @@ -996,12 +1133,14 @@ def send_cm_file(url, file_id=None): if "Location" in _res.headers: return (_res.headers["Location"], "Referer: https://send.cm/") except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if _passwordNeed: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" ) - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def send_cm(url): @@ -1040,7 +1179,7 @@ def __collectFolders(html): return folders def __getFile_link(file_id): - try: + with suppress(Exception): _res = session.post( "https://send.cm/", data={"op": "download2", "id": file_id}, @@ -1048,8 +1187,7 @@ def __getFile_link(file_id): ) if "Location" in _res.headers: return _res.headers["Location"] - except Exception: - pass + return None def __getFiles(html): files = [] @@ -1061,7 +1199,7 @@ def __getFiles(html): { "file_id": href.split("/")[-1], "file_name": file_name.strip(), - "size": text_to_bytes(size_text.strip()), + "size": speed_string_to_bytes(size_text.strip()), } ) return files @@ -1070,7 +1208,7 @@ def __writeContents(html_text, folderPath=""): folders = __collectFolders(html_text) for folder in folders: _html = HTML(cf_bypass(folder["folder_link"])) - __writeContents(_html, path.join(folderPath, folder["folder_name"])) + __writeContents(_html, ospath.join(folderPath, folder["folder_name"])) files = __getFiles(html_text) for file in files: if not (link := 
__getFile_link(file["file_id"])): @@ -1081,22 +1219,22 @@ __writeContents(html_text, folderPath=""): try: mainHtml = HTML(cf_bypass(url)) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: session.close() raise e except Exception as e: session.close() - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: {e.__class__.__name__} While getting mainHtml" ) try: __writeContents(mainHtml, details["title"]) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: session.close() raise e except Exception as e: session.close() - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: {e.__class__.__name__} While writing Contents" ) session.close() @@ -1108,34 +1246,29 @@ def __writeContents(html_text, folderPath=""): def doods(url): if "/e/" in url: url = url.replace("/e/", "/d/") - api_url = f"https://api.pake.tk/dood?url={url}" - response = get(api_url) - if response.status_code != 200: - raise DirectDownloadLinkError("ERROR: Failed to fetch direct link from API") - json_data = response.json() - if direct_link := json_data.get("data", {}).get("direct_link"): - return f"https://dd-cdn.pakai.eu.org/download?url={direct_link}" - raise DirectDownloadLinkError("ERROR: Direct link not found in API response") - - -def hubdrive(url): - try: - rs = Session() - p_url = urlparse(url) - js_query = rs.post( - f"{p_url.scheme}://{p_url.hostname}/ajax.php?ajax=direct-download", - data={"id": str(url.split("/")[-1])}, - headers={"x-requested-with": "XMLHttpRequest"}, - ).json() - if str(js_query["code"]) == "200": - dlink = f"{p_url.scheme}://{p_url.hostname}{js_query['file']}" - res = rs.get(dlink) - soup = BeautifulSoup(res.text, "html.parser") - gd_data = soup.select('a[class="btn btn-primary btn-user"]') - gd_link = gd_data[0]["href"] - return gd_link - except Exception: - raise DirectDownloadLinkError("ERROR: Download link not found try again") + parsed_url = urlparse(url) + with create_scraper() as session: + try: + html = HTML(session.get(url).text) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While fetching token link" + ) from e + if not (link := html.xpath("//div[@class='download-content']//a/@href")): + raise DirectDownloadLinkException( + "ERROR: Token link not found, or downloading may not be allowed! Open the link in a browser."
+ ) + link = f"{parsed_url.scheme}://{parsed_url.hostname}{link[0]}" + sleep(2) + try: + _res = session.get(link) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While fetching download link" + ) from e + if not (link := search(r"window\.open\('(\S+)'", _res.text)): + raise DirectDownloadLinkException("ERROR: Download link not found, try again") + return (link.group(1), f"Referer: {parsed_url.scheme}://{parsed_url.hostname}/") def easyupload(url): @@ -1149,13 +1282,13 @@ try: _res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") first_page_html = HTML(_res.text) if ( first_page_html.xpath("//h6[contains(text(),'Password Protected')]") and not _password ): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" ) if not ( @@ -1164,7 +1297,7 @@ _res.text, ) ): - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Failed to get server for EasyUpload Link" ) action_url = match.group() @@ -1179,7 +1312,7 @@ "cb": "c3o1vbaxbmwe", } if not (captcha_token := get_captcha_token(session, recaptcha_params)): - raise DirectDownloadLinkError("ERROR: Captcha token not found") + raise DirectDownloadLinkException("ERROR: Captcha token not found") try: data = { "type": "download-token", @@ -1190,19 +1323,21 @@ } json_resp = session.post(url=action_url, data=data).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if "download_link" in json_resp: return json_resp["download_link"] if "data" in json_resp: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: Failed to generate direct link due to {json_resp['data']}" ) - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: Failed to generate direct link from EasyUpload." ) -def filewish(url): +def filelions_and_streamwish(url): parsed_url = urlparse(url) hostname = parsed_url.hostname scheme = parsed_url.scheme @@ -1232,7 +1367,7 @@ apiKey = config_dict["STREAMWISH_API"] apiUrl = "https://api.streamwish.com" if not apiKey: - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( f"ERROR: API is not provided get it from {scheme}://{hostname}" ) file_code = url.split("/")[-1] @@ -1249,12 +1384,14 @@ params={"key": apiKey, "file_code": file_code, "hls": "1"}, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if _res["status"] != 200: - raise DirectDownloadLinkError(f"ERROR: {_res['msg']}") + raise DirectDownloadLinkException(f"ERROR: {_res['msg']}") result = _res["result"] if not result["versions"]: - raise DirectDownloadLinkError("ERROR: File Not Found") + raise DirectDownloadLinkException("ERROR: File Not Found") error = "\nProvide a quality to download the video\nAvailable Quality:" for version in result["versions"]: if quality == version["name"]: @@ -1268,10 +1405,10 @@ elif version["name"] == "h": error += "\nHD" error += f"{url}_{version['name']}
" - raise DirectDownloadLinkError(f"ERROR: {error}") + raise DirectDownloadLinkException(f"ERROR: {error}") -def streamvid(url): +def streamvid(url: str): file_code = url.split("/")[-1] parsed_url = urlparse(url) url = f"{parsed_url.scheme}://{parsed_url.hostname}/d/{file_code}" @@ -1280,18 +1417,22 @@ def streamvid(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if quality_defined: data = {} if not (inputs := html.xpath('//form[@id="F1"]//input')): - raise DirectDownloadLinkError("ERROR: No inputs found") + raise DirectDownloadLinkException("ERROR: No inputs found") for i in inputs: if key := i.get("name"): data[key] = i.get("value") try: html = HTML(session.post(url, data=data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if not ( script := html.xpath( '//script[contains(text(),"document.location.href")]/text()' @@ -1300,11 +1441,13 @@ def streamvid(url): if error := html.xpath( '//div[@class="alert alert-danger"][1]/text()[2]' ): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: direct link script not found!") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException( + "ERROR: direct link script not found!" + ) if directLink := findall(r'document\.location\.href="(.*)"', script[0]): return directLink[0] - raise DirectDownloadLinkError( + raise DirectDownloadLinkException( "ERROR: direct link not found! in the script" ) if (qualities_urls := html.xpath('//div[@id="dl_versions"]/a/@href')) and ( @@ -1313,10 +1456,10 @@ def streamvid(url): error = "\nProvide a quality to download the video\nAvailable Quality:" for quality_url, quality in zip(qualities_urls, qualities): error += f"\n{quality.strip()}{quality_url}
" - raise DirectDownloadLinkError(f"ERROR: {error}") + raise DirectDownloadLinkException(f"ERROR: {error}") if error := html.xpath('//div[@class="not-found-text"]/text()'): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: Something went wrong") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException("ERROR: Something went wrong") def streamhub(url): @@ -1327,9 +1470,11 @@ def streamhub(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if not (inputs := html.xpath('//form[@name="F1"]//input')): - raise DirectDownloadLinkError("ERROR: No inputs found") + raise DirectDownloadLinkException("ERROR: No inputs found") data = {} for i in inputs: if key := i.get("name"): @@ -1339,14 +1484,16 @@ def streamhub(url): try: html = HTML(session.post(url, data=data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if directLink := html.xpath( '//a[@class="btn btn-primary btn-go downloadbtn"]/@href' ): return directLink[0] if error := html.xpath('//div[@class="alert alert-danger"]/text()[2]'): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: direct link not found!") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException("ERROR: direct link not found!") def pcloud(url): @@ -1354,7 +1501,91 @@ def pcloud(url): try: res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e if link := findall(r".downloadlink.:..(https:.*)..", res.text): return link[0].replace(r"\/", "/") - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") + + +def tmpsend(url): + parsed_url = urlparse(url) + if any(x in parsed_url.path for x in ["thank-you", "download"]): + query_params = parse_qs(parsed_url.query) + if file_id := query_params.get("d"): + file_id = file_id[0] + elif not (file_id := parsed_url.path.strip("/")): + raise DirectDownloadLinkException("ERROR: Invalid URL format") + referer_url = f"https://tmpsend.com/thank-you?d={file_id}" + header = f"Referer: {referer_url}" + download_link = f"https://tmpsend.com/download?d={file_id}" + return download_link, header + + +def qiwi(url): + """qiwi.gg link generator + based on https://github.com/aenulrofik""" + with Session() as session: + file_id = url.split("/")[-1] + try: + res = session.get(url).text + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e + tree = HTML(res) + if name := tree.xpath('//h1[@class="page_TextHeading__VsM7r"]/text()'): + ext = name[0].split(".")[-1] + return f"https://spyderrock.com/{file_id}.{ext}" + raise DirectDownloadLinkException("ERROR: File not found") + + +def mp4upload(url): + with Session() as session: + try: + url = url.replace("embed-", "") + req = session.get(url).text + tree = HTML(req) + inputs = tree.xpath("//input") + header = {"Referer": "https://www.mp4upload.com/"} + data = {input.get("name"): input.get("value") for input in inputs} + if not data: + raise DirectDownloadLinkException("ERROR: File Not Found!") + post 
= session.post( + url, + data=data, + headers={ + "User-Agent": user_agent, + "Referer": "https://www.mp4upload.com/", + }, + ).text + tree = HTML(post) + inputs = tree.xpath('//form[@name="F1"]//input') + data = { + input.get("name"): input.get("value").replace(" ", "") + for input in inputs + } + if not data: + raise DirectDownloadLinkException("ERROR: File Not Found!") + data["referer"] = url + direct_link = session.post(url, data=data).url + return direct_link, header + except Exception: + raise DirectDownloadLinkException("ERROR: File Not Found!") + + +def berkasdrive(url): + """berkasdrive.com link generator + by https://github.com/aenulrofik""" + with Session() as session: + try: + sesi = session.get(url).text + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}" + ) from e + html = HTML(sesi) + if link := html.xpath("//script")[0].text.split('"')[1]: + return b64decode(link).decode("utf-8") + raise DirectDownloadLinkException("ERROR: File Not Found!") diff --git a/bot/helper/mirror_leech_utils/download_utils/gd_download.py b/bot/helper/mirror_leech_utils/download_utils/gd_download.py index 4b8569bdc..1bb1f4ec7 100644 --- a/bot/helper/mirror_leech_utils/download_utils/gd_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/gd_download.py @@ -1,78 +1,59 @@ from secrets import token_hex -from bot import ( - LOGGER, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) +from bot import LOGGER, task_dict, non_queued_dl, task_dict_lock, queue_dict_lock from bot.helper.ext_utils.bot_utils import sync_to_async -from bot.helper.aeon_utils.nsfw_check import is_nsfw, is_nsfw_data from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper +from bot.helper.telegram_helper.message_utils import sendStatusMessage +from bot.helper.mirror_leech_utils.gdrive_utils.count import gdCount +from bot.helper.mirror_leech_utils.gdrive_utils.download import gdDownload from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus -async def add_gd_download(link, path, listener, newname): - drive = GoogleDriveHelper() - name, mime_type, size, _, _ = await sync_to_async(drive.count, link) +async def add_gd_download(listener, path): + drive = gdCount() + name, mime_type, listener.size, _, _ = await sync_to_async( + drive.count, listener.link, listener.userId + ) if mime_type is None: await listener.onDownloadError(name) return - id = drive.getIdFromUrl(link) - data = drive.getFilesByFolderId(id) - name = newname or name - gid = token_hex(4) - if is_nsfw(name) or is_nsfw_data(data): - await listener.onDownloadError("NSFW detected") - return + listener.name = listener.name or name + gid = token_hex(4) - msg, button = await stop_duplicate_check(name, listener) + msg, button = await stop_duplicate_check(listener) if msg: - await send_message(listener.message, msg, button) + await listener.onDownloadError(msg, button) return - if limit_exceeded := await limit_checker(size, listener, is_drive_link=True): - await listener.onDownloadError(limit_exceeded) - return - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with 
download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "dl" - ) + + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await sendStatusMessage(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False - - drive = GoogleDriveHelper(name, path, listener) - async with download_dict_lock: - download_dict[listener.uid] = GdriveStatus( - drive, size, listener.message, gid, "dl" - ) + if listener.isCancelled: + return + async with queue_dict_lock: + non_queued_dl.add(listener.mid) - async with queue_dict_lock: - non_queued_dl.add(listener.uid) + drive = gdDownload(listener, path) + async with task_dict_lock: + task_dict[listener.mid] = GdriveStatus(listener, drive, gid, "dl") - if from_queue: - LOGGER.info(f"Start Queued Download from GDrive: {name}") + if add_to_queue: + LOGGER.info(f"Start Queued Download from GDrive: {listener.name}") else: - LOGGER.info(f"Download from GDrive: {name}") + LOGGER.info(f"Download from GDrive: {listener.name}") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await sendStatusMessage(listener.message) - await sync_to_async(drive.download, link) + await sync_to_async(drive.download) diff --git a/bot/helper/mirror_leech_utils/download_utils/mega_download.py b/bot/helper/mirror_leech_utils/download_utils/mega_download.py index 95ff4c840..0966bd383 100644 --- a/bot/helper/mirror_leech_utils/download_utils/mega_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/mega_download.py @@ -1,4 +1,3 @@ -# ruff: noqa: ARG002 from asyncio import Event from secrets import token_hex @@ -7,20 +6,16 @@ from bot import ( LOGGER, + task_dict, config_dict, - download_dict, non_queued_dl, + task_dict_lock, queue_dict_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - async_to_sync, - sync_to_async, - get_mega_link_type, ) +from bot.helper.ext_utils.bot_utils import async_to_sync, sync_to_async +from bot.helper.ext_utils.links_utils import get_mega_link_type from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage @@ -29,54 +24,56 @@ class MegaAppListener(MegaListener): - _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES) NO_ERROR = "no error" + _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES) def __init__(self, continue_event: Event, listener): + super().__init__() self.continue_event = continue_event + self.listener = listener self.node = None self.public_node = None - self.listener = listener self.is_cancelled = False self.error = None - self.__bytes_transferred = 0 - self.__speed = 0 - self.__name = "" - super().__init__() + self._bytes_transferred = 0 + self._speed = 0 + self._name = "" @property def speed(self): - return self.__speed + return self._speed @property def downloaded_bytes(self): - return self.__bytes_transferred + return self._bytes_transferred def onRequestFinish(self, api, request, error): - if str(error).lower() != "no error": + if str(error).lower() != 
MegaAppListener.NO_ERROR: self.error = error.copy() LOGGER.error(f"Mega onRequestFinishError: {self.error}") self.continue_event.set() return + request_type = request.getType() if request_type == MegaRequest.TYPE_LOGIN: api.fetchNodes() elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE: self.public_node = request.getPublicMegaNode() - self.__name = self.public_node.getName() + self._name = self.public_node.getName() elif request_type == MegaRequest.TYPE_FETCH_NODES: LOGGER.info("Fetching Root Node.") self.node = api.getRootNode() - self.__name = self.node.getName() + self._name = self.node.getName() LOGGER.info(f"Node Name: {self.node.getName()}") + if ( - request_type not in self._NO_EVENT_ON + request_type not in MegaAppListener._NO_EVENT_ON or self.node - and "cloud drive" not in self.__name.lower() + and "cloud drive" not in self._name.lower() ): self.continue_event.set() - def onRequestTemporaryError(self, _, request, error: MegaError): + def onRequestTemporaryError(self, api, request, error: MegaError): LOGGER.error(f"Mega Request error in {error}") if not self.is_cancelled: self.is_cancelled = True @@ -92,15 +89,15 @@ def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer): api.cancelTransfer(transfer, None) self.continue_event.set() return - self.__speed = transfer.getSpeed() - self.__bytes_transferred = transfer.getTransferredBytes() + self._speed = transfer.getSpeed() + self._bytes_transferred = transfer.getTransferredBytes() def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error): try: if self.is_cancelled: self.continue_event.set() elif transfer.isFinished() and ( - transfer.isFolderTransfer() or transfer.getFileName() == self.__name + transfer.isFolderTransfer() or transfer.getFileName() == self._name ): async_to_sync(self.listener.on_download_complete) self.continue_event.set() @@ -124,22 +121,21 @@ def onTransferTemporaryError(self, api, transfer, error): ) self.continue_event.set() - async def cancel_download(self): - self.is_cancelled = True - await self.listener.onDownloadError("Download Canceled by user") - class AsyncExecutor: def __init__(self): self.continue_event = Event() - async def do(self, function, args): + async def do(self, function, *args): self.continue_event.clear() await sync_to_async(function, *args) await self.continue_event.wait() -async def add_mega_download(mega_link, path, listener, name): +async def add_mega_download(listener, path): + mega_link = listener.link + name = listener.name + listener.name = listener.name MEGA_EMAIL = config_dict["MEGA_EMAIL"] MEGA_PASSWORD = config_dict["MEGA_PASSWORD"] @@ -151,65 +147,60 @@ async def add_mega_download(mega_link, path, listener, name): api.addListener(mega_listener) if MEGA_EMAIL and MEGA_PASSWORD: - await executor.do(api.login, (MEGA_EMAIL, MEGA_PASSWORD)) + await executor.do(api.login, MEGA_EMAIL, MEGA_PASSWORD) if get_mega_link_type(mega_link) == "file": - await executor.do(api.getPublicNode, (mega_link,)) + await executor.do(api.getPublicNode, mega_link) node = mega_listener.public_node else: folder_api = MegaApi(None, None, None, "aeon") folder_api.addListener(mega_listener) - await executor.do(folder_api.loginToFolder, (mega_link,)) + await executor.do(folder_api.loginToFolder, mega_link) node = await sync_to_async(folder_api.authorizeNode, mega_listener.node) - if mega_listener.error is not None: + + if mega_listener.error: await send_message(listener.message, str(mega_listener.error)) - await executor.do(api.logout, ()) - if folder_api is not None: - await 
executor.do(folder_api.logout, ()) + await executor.do(api.logout) + if folder_api: + await executor.do(folder_api.logout) return name = name or node.getName() - msg, button = await stop_duplicate_check(name, listener) + msg, button = await stop_duplicate_check(listener) if msg: await send_message(listener.message, msg, button) - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + await executor.do(api.logout) + if folder_api: + await executor.do(folder_api.logout) return gid = token_hex(4) size = api.getSize(node) - if limit_exceeded := await limit_checker(size, listener, is_mega=True): - await listener.onDownloadError(limit_exceeded) - return - - added_to_queue, event = await is_queued(listener.uid) + added_to_queue, event = await check_running_tasks(listener, "dl") if added_to_queue: LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "Dl" - ) + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() await sendStatusMessage(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + async with task_dict_lock: + if listener.mid not in task_dict: + await executor.do(api.logout) + if folder_api: + await executor.do(folder_api.logout) return from_queue = True LOGGER.info(f"Start Queued Download from Mega: {name}") else: from_queue = False - async with download_dict_lock: - download_dict[listener.uid] = MegaDownloadStatus( - name, size, gid, mega_listener, listener.message + async with task_dict_lock: + task_dict[listener.mid] = MegaDownloadStatus( + listener, name, size, gid, mega_listener ) async with queue_dict_lock: - non_queued_dl.add(listener.uid) + non_queued_dl.add(listener.mid) if from_queue: LOGGER.info(f"Start Queued Download from Mega: {name}") @@ -219,7 +210,7 @@ async def add_mega_download(mega_link, path, listener, name): LOGGER.info(f"Download from Mega: {name}") await makedirs(path, exist_ok=True) - await executor.do(api.startDownload, (node, path, name, None, False, None)) - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + await executor.do(api.startDownload, node, path, name, None, False, None) + await executor.do(api.logout) + if folder_api: + await executor.do(folder_api.logout) diff --git a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py index 2aaa762d2..0a8bf0df5 100644 --- a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py @@ -1,19 +1,19 @@ -from time import time +from asyncio import sleep from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove from bot import ( LOGGER, + task_dict, config_dict, xnox_client, - download_dict, non_queued_dl, + task_dict_lock, queue_dict_lock, - download_dict_lock, ) from bot.helper.ext_utils.bot_utils import sync_to_async, bt_selection_buttons -from bot.helper.ext_utils.task_manager import is_queued +from bot.helper.ext_utils.task_manager import check_running_tasks from bot.helper.listeners.qbit_listener import on_download_start from bot.helper.telegram_helper.message_utils import ( 
send_message, @@ -23,74 +23,68 @@ from bot.helper.mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus -async def add_qb_torrent(link, path, listener, ratio, seed_time): - ADD_TIME = time() +async def add_qb_torrent(listener, path, ratio, seed_time): try: - url = link + url = listener.link tpath = None - if await aiopath.exists(link): + if await aiopath.exists(listener.link): url = None - tpath = link - added_to_queue, event = await is_queued(listener.uid) + tpath = listener.link + add_to_queue, event = await check_running_tasks(listener) op = await sync_to_async( xnox_client.torrents_add, url, tpath, path, - is_paused=added_to_queue, - tags=f"{listener.uid}", + is_paused=add_to_queue, + tags=f"{listener.mid}", ratio_limit=ratio, seeding_time_limit=seed_time, headers={"user-agent": "Wget/1.12"}, ) if op.lower() == "ok.": tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, tag=f"{listener.mid}" ) if len(tor_info) == 0: while True: tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, tag=f"{listener.mid}" ) if len(tor_info) > 0: break - if time() - ADD_TIME >= 120: - await listener.onDownloadError( - "Not added! Check if the link is valid or not. If it's torrent file then report, this happens if torrent file size above 10mb." - ) - return + await sleep(1) tor_info = tor_info[0] + listener.name = tor_info.name ext_hash = tor_info.hash else: await listener.onDownloadError( - "This Torrent already added or unsupported/invalid link/file." + "This Torrent already added or unsupported/invalid link/file.", ) return - async with download_dict_lock: - download_dict[listener.uid] = QbittorrentStatus( - listener, queued=added_to_queue + async with task_dict_lock: + task_dict[listener.mid] = QbittorrentStatus( + listener, queued=add_to_queue ) - await on_download_start(f"{listener.uid}") + await on_download_start(f"{listener.mid}") - if added_to_queue: + if add_to_queue: LOGGER.info( f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}" ) else: - async with queue_dict_lock: - non_queued_dl.add(listener.uid) LOGGER.info(f"QbitDownload started: {tor_info.name} - Hash: {ext_hash}") await listener.on_download_start() if config_dict["BASE_URL"] and listener.select: - if link.startswith("magnet:"): + if listener.link.startswith("magnet:"): metamsg = "Downloading Metadata, wait then you can select files. Use torrent file to avoid this wait." meta = await send_message(listener.message, metamsg) while True: tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, tag=f"{listener.mid}" ) if len(tor_info) == 0: await delete_message(meta) @@ -109,33 +103,31 @@ async def add_qb_torrent(link, path, listener, ratio, seed_time): return ext_hash = tor_info.hash - if not added_to_queue: + if not add_to_queue: await sync_to_async( xnox_client.torrents_pause, torrent_hashes=ext_hash ) - s_buttons = bt_selection_buttons(ext_hash) + SBUTTONS = bt_selection_buttons(ext_hash) msg = "Your download paused. Choose files then press Done Selecting button to start downloading." 
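+                    # NOTE: the torrent stays paused while the user picks files via the selection buttons; resuming after "Done Selecting" is assumed to be handled by the qbit listener, not here.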
- await send_message(listener.message, msg, s_buttons) - else: + await send_message(listener.message, msg, SBUTTONS) + elif listener.multi <= 1: await sendStatusMessage(listener.message) - if added_to_queue: + if add_to_queue: await event.wait() - - async with download_dict_lock: - if listener.uid not in download_dict: - return - download_dict[listener.uid].queued = False + if listener.isCancelled: + return + async with queue_dict_lock: + non_queued_dl.add(listener.mid) + async with task_dict_lock: + task_dict[listener.mid].queued = False await sync_to_async(xnox_client.torrents_resume, torrent_hashes=ext_hash) LOGGER.info( f"Start Queued Download from Qbittorrent: {tor_info.name} - Hash: {ext_hash}" ) - - async with queue_dict_lock: - non_queued_dl.add(listener.uid) except Exception as e: - await send_message(listener.message, str(e)) + await listener.onDownloadError(f"{e}") finally: - if await aiopath.exists(link): - await aioremove(link) + if tpath and await aiopath.exists(listener.link): + await remove(listener.link) diff --git a/bot/helper/mirror_leech_utils/download_utils/rclone_download.py b/bot/helper/mirror_leech_utils/download_utils/rclone_download.py index 03d185322..75d89b57c 100644 --- a/bot/helper/mirror_leech_utils/download_utils/rclone_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/rclone_download.py @@ -2,27 +2,30 @@ from asyncio import gather from secrets import token_hex -from bot import ( - LOGGER, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) +from bot import LOGGER, task_dict, non_queued_dl, task_dict_lock, queue_dict_lock from bot.helper.ext_utils.bot_utils import cmd_exec -from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage +from bot.helper.ext_utils.task_manager import ( + check_running_tasks, + stop_duplicate_check, +) +from bot.helper.telegram_helper.message_utils import sendStatusMessage from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus -async def add_rclone_download(rc_path, config_path, path, name, listener): - remote, rc_path = rc_path.split(":", 1) - rc_path = rc_path.strip("/") +async def add_rclone_download(listener, path): + if listener.link.startswith("mrcc:"): + listener.link = listener.link.split("mrcc:", 1)[1] + config_path = f"rclone/{listener.userId}.conf" + else: + config_path = "rclone.conf" + + remote, listener.link = listener.link.split(":", 1) + listener.link = listener.link.strip("/") cmd1 = [ - "rclone", + "xone", "lsjson", "--fast-list", "--stat", @@ -30,74 +33,76 @@ "--no-modtime", "--config", config_path, - f"{remote}:{rc_path}", + f"{remote}:{listener.link}", ] cmd2 = [ - "rclone", + "xone", "size", "--fast-list", "--json", "--config", config_path, - f"{remote}:{rc_path}", + f"{remote}:{listener.link}", ] res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2)) if res1[2] != res2[2] != 0: if res1[2] != -9: - err = res1[1] or res2[1] - msg = f"Error: While getting rclone stat/size. Path: {remote}:{rc_path}. Stderr: {err[:4000]}" - await send_message(listener.message, msg) + err = ( + res1[1] + or res2[1] + or "Use /shell cat rlog.txt
to see more information" + ) + msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {err[:4000]}" + await listener.onDownloadError(msg) return try: rstat = loads(res1[0]) rsize = loads(res2[0]) except Exception as err: - await send_message(listener.message, f"RcloneDownload JsonLoad: {err}") + if not str(err): + err = "Use /shell cat rlog.txt
to see more information" + await listener.onDownloadError(f"RcloneDownload JsonLoad: {err}") return if rstat["IsDir"]: - if not name: - name = rc_path.rsplit("/", 1)[-1] if rc_path else remote - path += name + if not listener.name: + listener.name = ( + listener.link.rsplit("/", 1)[-1] if listener.link else remote + ) + path += listener.name else: - name = rc_path.rsplit("/", 1)[-1] - size = rsize["bytes"] + listener.name = listener.link.rsplit("/", 1)[-1] + listener.size = rsize["bytes"] gid = token_hex(4) - msg, button = await stop_duplicate_check(name, listener) + msg, button = await stop_duplicate_check(listener) if msg: - await send_message(listener.message, msg, button) + await listener.onDownloadError(msg, button) return - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "dl" - ) + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await sendStatusMessage(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False + if listener.isCancelled: + return + async with queue_dict_lock: + non_queued_dl.add(listener.mid) - RCTransfer = RcloneTransferHelper(listener, name) - async with download_dict_lock: - download_dict[listener.uid] = RcloneStatus( - RCTransfer, listener.message, gid, "dl" - ) - async with queue_dict_lock: - non_queued_dl.add(listener.uid) + RCTransfer = RcloneTransferHelper(listener) + async with task_dict_lock: + task_dict[listener.mid] = RcloneStatus(listener, RCTransfer, gid, "dl") - if from_queue: - LOGGER.info(f"Start Queued Download with rclone: {rc_path}") + if add_to_queue: + LOGGER.info(f"Start Queued Download with rclone: {listener.link}") else: await listener.on_download_start() - await sendStatusMessage(listener.message) - LOGGER.info(f"Download with rclone: {rc_path}") + if listener.multi <= 1: + await sendStatusMessage(listener.message) + LOGGER.info(f"Download with rclone: {listener.link}") - await RCTransfer.download(remote, rc_path, config_path, path) + await RCTransfer.download(remote, config_path, path) diff --git a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py index ef05b9a1b..e2bc3d763 100644 --- a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py @@ -1,171 +1,163 @@ -import contextlib from time import time -from asyncio import Lock -from logging import ERROR, getLogger -from secrets import token_hex +from asyncio import Lock, sleep +from contextlib import suppress + +from pyrogram.errors import FloodWait from bot import ( LOGGER, - IS_PREMIUM_USER, bot, - user, - download_dict, + task_dict, non_queued_dl, + task_dict_lock, queue_dict_lock, - download_dict_lock, ) from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, - sendStatusMessage, -) +from 
bot.helper.telegram_helper.message_utils import sendStatusMessage from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus global_lock = Lock() GLOBAL_GID = set() -getLogger("pyrogram").setLevel(ERROR) class TelegramDownloadHelper: def __init__(self, listener): - self.name = "" - self.__processed_bytes = 0 - self.__start_time = time() - self.__listener = listener - self.__id = "" - self.__is_cancelled = False + self._processed_bytes = 0 + self._start_time = time() + self._listener = listener + self._id = "" + self.session = "" @property def speed(self): - return self.__processed_bytes / (time() - self.__start_time) + return self._processed_bytes / (time() - self._start_time) @property def processed_bytes(self): - return self.__processed_bytes + return self._processed_bytes - async def __on_download_start(self, name, size, file_id, from_queue): + async def _on_download_start(self, file_id, from_queue): async with global_lock: GLOBAL_GID.add(file_id) - self.name = name - self.__id = file_id - gid = token_hex(4) - async with download_dict_lock: - download_dict[self.__listener.uid] = TelegramStatus( - self, size, self.__listener.message, gid, "dl" + self._id = file_id + async with task_dict_lock: + task_dict[self._listener.mid] = TelegramStatus( + self._listener, self, file_id[:12], "dl" ) - async with queue_dict_lock: - non_queued_dl.add(self.__listener.uid) if not from_queue: - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) - LOGGER.info(f"Download from Telegram: {name}") + await self._listener.on_download_start() + if self._listener.multi <= 1: + await sendStatusMessage(self._listener.message) + LOGGER.info(f"Download from Telegram: {self._listener.name}") else: - LOGGER.info(f"Start Queued Download from Telegram: {name}") + LOGGER.info( + f"Start Queued Download from Telegram: {self._listener.name}" + ) - async def __onDownloadProgress(self, current, _): - if self.__is_cancelled: - if IS_PREMIUM_USER: - user.stop_transmission() - else: - bot.stop_transmission() - self.__processed_bytes = current + async def _onDownloadProgress(self, current, total): + if self._listener.isCancelled: + self.session.stop_transmission() + self._processed_bytes = current - async def __on_download_error(self, error): + async def _onDownloadError(self, error): async with global_lock: - with contextlib.suppress(Exception): - GLOBAL_GID.remove(self.__id) - await self.__listener.onDownloadError(error) + with suppress(Exception): + GLOBAL_GID.remove(self._id) + await self._listener.onDownloadError(error) - async def __on_download_complete(self): - await self.__listener.on_download_complete() + async def _on_download_complete(self): + await self._listener.on_download_complete() async with global_lock: - GLOBAL_GID.remove(self.__id) + GLOBAL_GID.remove(self._id) - async def __download(self, message, path): + async def _download(self, message, path): try: download = await message.download( - file_name=path, progress=self.__onDownloadProgress + file_name=path, progress=self._onDownloadProgress ) - if self.__is_cancelled: - await self.__on_download_error("Cancelled by user!") + if self._listener.isCancelled: + await self._onDownloadError("Cancelled by user!") return + except FloodWait as f: + LOGGER.warning(str(f)) + await sleep(f.value) + return await self._download(message, path)  # retry after the wait; otherwise `download` is never bound below + except Exception as e: LOGGER.error(str(e)) - await self.__on_download_error(str(e)) + await self._onDownloadError(str(e)) return if
download is not None: - await self.__on_download_complete() - elif not self.__is_cancelled: - await self.__on_download_error("Internal error occurred") - - async def add_download(self, message, path, filename, session): - if session == "user": - if not self.__listener.isSuperGroup: - await send_message( - message, "Use SuperGroup to download this Link with User!" - ) - return - message = await user.get_messages( + await self._on_download_complete() + elif not self._listener.isCancelled: + await self._onDownloadError("Internal error occurred") + + async def add_download(self, message, path, session): + self.session = session if session else bot + if self.session != bot: + message = await self.session.get_messages( chat_id=message.chat.id, message_ids=message.id ) - - media = getattr(message, message.media.value) if message.media else None + media = ( + message.document + or message.photo + or message.video + or message.audio + or message.voice + or message.video_note + or message.sticker + or message.animation + or None + ) if media is not None: async with global_lock: download = media.file_unique_id not in GLOBAL_GID if download: - if filename == "": - name = media.file_name if hasattr(media, "file_name") else "None" + if self._listener.name == "": + self._listener.name = ( + media.file_name if hasattr(media, "file_name") else "None" + ) else: - name = filename - path = path + name - size = media.file_size + path = path + self._listener.name + self._listener.size = media.file_size gid = media.file_unique_id - msg, button = await stop_duplicate_check(name, self.__listener) + msg, button = await stop_duplicate_check(self._listener) if msg: - await send_message(self.__listener.message, msg, button) - await delete_links(self.__listener.message) - return - if limit_exceeded := await limit_checker(size, self.__listener): - await self.__listener.onDownloadError(limit_exceeded) - await delete_links(self.__listener.message) + await self._listener.onDownloadError(msg, button) return - added_to_queue, event = await is_queued(self.__listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[self.__listener.uid] = QueueStatus( - name, size, gid, self.__listener, "dl" + + add_to_queue, event = await check_running_tasks(self._listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {self._listener.name}") + async with task_dict_lock: + task_dict[self._listener.mid] = QueueStatus( + self._listener, gid, "dl" ) - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) + await self._listener.on_download_start() + if self._listener.multi <= 1: + await sendStatusMessage(self._listener.message) await event.wait() - async with download_dict_lock: - if self.__listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False - await self.__on_download_start(name, size, gid, from_queue) - await self.__download(message, path) + if self._listener.isCancelled: + return + async with queue_dict_lock: + non_queued_dl.add(self._listener.mid) + + await self._on_download_start(gid, add_to_queue) + await self._download(message, path) else: - await self.__on_download_error("File already being downloaded!") + await self._onDownloadError("File already being downloaded!") else: - await self.__on_download_error( - "No valid media type in the replied message" + await self._onDownloadError( + "No document in the replied message! 
Use a SuperGroup if you are trying to download with a User session!" ) - async def cancel_download(self): - self.__is_cancelled = True + async def cancel_task(self): + self._listener.isCancelled = True LOGGER.info( - f"Cancelling download via User: [ Name: {self.name} ID: {self.__id} ]" + f"Cancelling download on user request: name: {self._listener.name} id: {self._id}" ) diff --git a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py index fe71b6379..f6b982f9f 100644 --- a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py @@ -1,22 +1,21 @@ -import contextlib from os import path as ospath from os import listdir from re import search as re_search from logging import getLogger from secrets import token_hex +from contextlib import suppress from yt_dlp import YoutubeDL, DownloadError -from bot import download_dict, non_queued_dl, queue_dict_lock, download_dict_lock +from bot import task_dict, non_queued_dl, task_dict_lock, queue_dict_lock from bot.helper.ext_utils.bot_utils import async_to_sync, sync_to_async from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) from bot.helper.telegram_helper.message_utils import sendStatusMessage from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus -from bot.helper.mirror_leech_utils.status_utils.ytdlp_status import ( +from bot.helper.mirror_leech_utils.status_utils.yt_dlp_download_status import ( YtDlpDownloadStatus, ) @@ -24,18 +23,19 @@ class MyLogger: - def __init__(self, obj): - self.obj = obj + def __init__(self, obj, listener): + self._obj = obj + self._listener = listener def debug(self, msg): - if not self.obj.is_playlist and ( + if not self._obj.is_playlist and ( match := re_search(r".Merger..Merging formats into..(.*?).$", msg) or re_search(r".ExtractAudio..Destination..(.*?)$", msg) ): LOGGER.info(msg) newname = match.group(1) newname = newname.rsplit("/", 1)[-1] - self.obj.name = newname + self._listener.name = newname @staticmethod def warning(msg): @@ -49,23 +49,19 @@ def error(msg): class YoutubeDLHelper: def __init__(self, listener): - self.__last_downloaded = 0 - self.__size = 0 - self.__progress = 0 - self.__downloaded_bytes = 0 - self.__download_speed = 0 - self.__eta = "-" - self.__listener = listener - self.__gid = "" - self.__is_cancelled = False - self.__downloading = False - self.__ext = "" - self.name = "" + self._last_downloaded = 0 + self._progress = 0 + self._downloaded_bytes = 0 + self._download_speed = 0 + self._eta = "-" + self._listener = listener + self._gid = "" + self._downloading = False + self._ext = "" self.is_playlist = False - self.playlist_count = 0 self.opts = { - "progress_hooks": [self.__onDownloadProgress], - "logger": MyLogger(self), + "progress_hooks": [self._onDownloadProgress], + "logger": MyLogger(self, self._listener), "usenetrc": True, "cookiefile": "cookies.txt", "allow_multiple_video_streams": True, @@ -77,143 +73,138 @@ def __init__(self, listener): "trim_file_name": 220, "ffmpeg_location": "/bin/xtra", "retry_sleep_functions": { - "http": lambda _: 3, - "fragment": lambda _: 3, - "file_access": lambda _: 3, - "extractor": lambda _: 3, + "http": lambda n: 3, + "fragment": lambda n: 3, + "file_access": lambda n: 3, + "extractor": lambda n: 3, }, } @property def download_speed(self): - return self.__download_speed + return self._download_speed
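+    # The properties below expose a read-only progress snapshot fed by _onDownloadProgress(); the status classes presumably poll them when rendering task progress.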
@property def downloaded_bytes(self): - return self.__downloaded_bytes + return self._downloaded_bytes @property def size(self): - return self.__size + return self._listener.size @property def progress(self): - return self.__progress + return self._progress @property def eta(self): - return self.__eta + return self._eta - def __onDownloadProgress(self, d): - self.__downloading = True - if self.__is_cancelled: + def _onDownloadProgress(self, d): + self._downloading = True + if self._listener.isCancelled: raise ValueError("Cancelling...") if d["status"] == "finished": if self.is_playlist: - self.__last_downloaded = 0 + self._last_downloaded = 0 elif d["status"] == "downloading": - self.__download_speed = d["speed"] + self._download_speed = d["speed"] if self.is_playlist: downloadedBytes = d["downloaded_bytes"] - chunk_size = downloadedBytes - self.__last_downloaded - self.__last_downloaded = downloadedBytes - self.__downloaded_bytes += chunk_size + chunk_size = downloadedBytes - self._last_downloaded + self._last_downloaded = downloadedBytes + self._downloaded_bytes += chunk_size else: if d.get("total_bytes"): - self.__size = d["total_bytes"] + self._listener.size = d["total_bytes"] elif d.get("total_bytes_estimate"): - self.__size = d["total_bytes_estimate"] - self.__downloaded_bytes = d["downloaded_bytes"] - self.__eta = d.get("eta", "-") or "-" - with contextlib.suppress(Exception): - self.__progress = (self.__downloaded_bytes / self.__size) * 100 - - async def __on_download_start(self, from_queue=False): - async with download_dict_lock: - download_dict[self.__listener.uid] = YtDlpDownloadStatus( - self, self.__listener, self.__gid + self._listener.size = d["total_bytes_estimate"] + self._downloaded_bytes = d["downloaded_bytes"] + self._eta = d.get("eta", "-") or "-" + with suppress(Exception): + self._progress = (self._downloaded_bytes / self._listener.size) * 100 + + async def _on_download_start(self, from_queue=False): + async with task_dict_lock: + task_dict[self._listener.mid] = YtDlpDownloadStatus( + self._listener, self, self._gid ) if not from_queue: - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) + await self._listener.on_download_start() + if self._listener.multi <= 1: + await sendStatusMessage(self._listener.message) - def __on_download_error(self, error): - self.__is_cancelled = True - async_to_sync(self.__listener.onDownloadError, error) + def _onDownloadError(self, error): + self._listener.isCancelled = True + async_to_sync(self._listener.onDownloadError, error) - def extractMetaData(self, link, name): - if link.startswith(("rtmp", "mms", "rstp", "rtmps")): + def extractMetaData(self): + if self._listener.link.startswith(("rtmp", "mms", "rtsp", "rtmps")): self.opts["external_downloader"] = "ffmpeg" with YoutubeDL(self.opts) as ydl: try: - result = ydl.extract_info(link, download=False) + result = ydl.extract_info(self._listener.link, download=False) if result is None: raise ValueError("Info result is None") except Exception as e: - return self.__on_download_error(str(e)) - if self.is_playlist: - self.playlist_count = result.get("playlist_count", 0) + return self._onDownloadError(str(e)) if "entries" in result: - self.name = name for entry in result["entries"]: if not entry: continue if "filesize_approx" in entry: - self.__size += entry["filesize_approx"] + self._listener.size += entry["filesize_approx"] elif "filesize" in entry: - self.__size += entry["filesize"] +
if not self._listener.name: outtmpl_ = "%(series,playlist_title,channel)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d.%(ext)s" - self.name, ext = ospath.splitext( + self._listener.name, ext = ospath.splitext( ydl.prepare_filename(entry, outtmpl=outtmpl_) ) - if not self.__ext: - self.__ext = ext + if not self._ext: + self._ext = ext return None outtmpl_ = "%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s" realName = ydl.prepare_filename(result, outtmpl=outtmpl_) ext = ospath.splitext(realName)[-1] - self.name = f"{name}{ext}" if name else realName - if not self.__ext: - self.__ext = ext - if result.get("filesize"): - self.__size = result["filesize"] - return None - if result.get("filesize_approx"): - self.__size = result["filesize_approx"] + self._listener.name = ( + f"{self._listener.name}{ext}" if self._listener.name else realName + ) + if not self._ext: + self._ext = ext return None return None - def __download(self, link, path): + def _download(self, path): try: with YoutubeDL(self.opts) as ydl: try: - ydl.download([link]) + ydl.download([self._listener.link]) except DownloadError as e: - if not self.__is_cancelled: - self.__on_download_error(str(e)) + if not self._listener.isCancelled: + self._onDownloadError(str(e)) return if self.is_playlist and ( not ospath.exists(path) or len(listdir(path)) == 0 ): - self.__on_download_error( + self._onDownloadError( "No video available to download from this playlist. Check logs for more details" ) return - if self.__is_cancelled: + if self._listener.isCancelled: raise ValueError - async_to_sync(self.__listener.on_download_complete) + async_to_sync(self._listener.on_download_complete) except ValueError: - self.__on_download_error("Download Stopped by User!") + self._onDownloadError("Download Stopped by User!") - async def add_download(self, link, path, name, qual, playlist, options): + async def add_download(self, path, qual, playlist, options): if playlist: self.opts["ignoreerrors"] = True self.is_playlist = True - self.__gid = token_hex(4) + self._gid = token_hex(4) - await self.__on_download_start() + await self._on_download_start() self.opts["postprocessors"] = [ { @@ -237,32 +228,34 @@ async def add_download(self, link, path, name, qual, playlist, options): } ) if audio_format == "vorbis": - self.__ext = ".ogg" + self._ext = ".ogg" elif audio_format == "alac": - self.__ext = ".m4a" + self._ext = ".m4a" else: - self.__ext = f".{audio_format}" - - self.opts["format"] = qual + self._ext = f".{audio_format}" if options: - self.__set_options(options) + self._set_options(options) + + self.opts["format"] = qual - await sync_to_async(self.extractMetaData, link, name) - if self.__is_cancelled: + await sync_to_async(self.extractMetaData) + if self._listener.isCancelled: return - base_name, ext = ospath.splitext(self.name) - trim_name = self.name if self.is_playlist else base_name + base_name, ext = ospath.splitext(self._listener.name) + trim_name = self._listener.name if self.is_playlist else base_name if len(trim_name.encode()) > 200: - self.name = ( - self.name[:200] if self.is_playlist else f"{base_name[:200]}{ext}" + self._listener.name = ( + self._listener.name[:200] + if self.is_playlist + else f"{base_name[:200]}{ext}" ) - base_name = ospath.splitext(self.name)[0] + base_name = ospath.splitext(self._listener.name)[0] if self.is_playlist: self.opts["outtmpl"] = { - 
"default": f"{path}/{self.name}/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", + "default": f"{path}/{self._listener.name}/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", "thumbnail": f"{path}/yt-dlp-thumb/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", } elif any( @@ -279,19 +272,19 @@ async def add_download(self, link, path, name, qual, playlist, options): ] ): self.opts["outtmpl"] = { - "default": f"{path}/{base_name}/{self.name}", + "default": f"{path}/{base_name}/{self._listener.name}", "thumbnail": f"{path}/yt-dlp-thumb/{base_name}.%(ext)s", } else: self.opts["outtmpl"] = { - "default": f"{path}/{self.name}", + "default": f"{path}/{self._listener.name}", "thumbnail": f"{path}/yt-dlp-thumb/{base_name}.%(ext)s", } if qual.startswith("ba/b"): - self.name = f"{base_name}{self.__ext}" + self._listener.name = f"{base_name}{self._ext}" - if self.__listener.is_leech: + if self._listener.is_leech: self.opts["postprocessors"].append( { "format": "jpg", @@ -299,7 +292,7 @@ async def add_download(self, link, path, name, qual, playlist, options): "when": "before_dl", } ) - if self.__ext in [ + if self._ext in [ ".mp3", ".mkv", ".mka", @@ -309,62 +302,52 @@ async def add_download(self, link, path, name, qual, playlist, options): ".m4a", ".mp4", ".mov", - "m4v", + ".m4v", ]: self.opts["postprocessors"].append( { - "already_have_thumbnail": self.__listener.is_leech, + "already_have_thumbnail": self._listener.is_leech, "key": "EmbedThumbnail", } ) - elif not self.__listener.is_leech: + elif not self._listener.is_leech: self.opts["writethumbnail"] = False - msg, button = await stop_duplicate_check(self.name, self.__listener) + msg, button = await stop_duplicate_check(self._listener) if msg: - await self.__listener.onDownloadError(msg, button) + await self._listener.onDownloadError(msg, button) return - if limit_exceeded := await limit_checker( - self.__size, - self.__listener, - is_ytdlp=True, - is_playlist=self.playlist_count, - ): - await self.__listener.onDownloadError(limit_exceeded) - return - added_to_queue, event = await is_queued(self.__listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {self.name}") - async with download_dict_lock: - download_dict[self.__listener.uid] = QueueStatus( - self.name, self.__size, self.__gid, self.__listener, "dl" + + add_to_queue, event = await check_running_tasks(self._listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {self._listener.name}") + async with task_dict_lock: + task_dict[self._listener.mid] = QueueStatus( + self._listener, self._gid, "dl" ) await event.wait() - async with download_dict_lock: - if self.__listener.uid not in download_dict: - return - LOGGER.info(f"Start Queued Download from YT_DLP: {self.name}") - await self.__on_download_start(True) - else: - LOGGER.info(f"Download with YT_DLP: {self.name}") + if self._listener.isCancelled: + return + async with queue_dict_lock: + non_queued_dl.add(self._listener.mid) + LOGGER.info(f"Start Queued Download from YT_DLP: {self._listener.name}") + await 
self._on_download_start(True) - async with queue_dict_lock: - non_queued_dl.add(self.__listener.uid) + if not add_to_queue: + LOGGER.info(f"Download with YT_DLP: {self._listener.name}") - await sync_to_async(self.__download, link, path) + await sync_to_async(self._download, path) - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Download: {self.name}") - if not self.__downloading: - await self.__listener.onDownloadError("Download Cancelled by User!") + async def cancel_task(self): + self._listener.isCancelled = True + LOGGER.info(f"Cancelling Download: {self._listener.name}") + if not self._downloading: + await self._listener.onDownloadError("Download Cancelled by User!") - def __set_options(self, options): + def _set_options(self, options): options = options.split("|") for opt in options: key, value = map(str.strip, opt.split(":", 1)) - if key == "format" and value.startswith("ba/b-"): - continue if value.startswith("^"): if "." in value or value == "^inf": value = float(value.split("^", 1)[1]) diff --git a/bot/helper/mirror_leech_utils/upload_utils/__init__.py b/bot/helper/mirror_leech_utils/gdrive_utils/__init__.py similarity index 100% rename from bot/helper/mirror_leech_utils/upload_utils/__init__.py rename to bot/helper/mirror_leech_utils/gdrive_utils/__init__.py diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/clone.py b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py new file mode 100644 index 000000000..44be1f7aa --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py @@ -0,0 +1,170 @@ +from os import path as ospath +from time import time +from logging import getLogger + +from tenacity import ( + RetryError, + retry, + wait_exponential, + stop_after_attempt, + retry_if_exception_type, +) +from googleapiclient.errors import HttpError + +from bot.helper.ext_utils.bot_utils import async_to_sync +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class gdClone(GoogleDriveHelper): + def __init__(self, listener): + self.listener = listener + self._start_time = time() + super().__init__() + self.is_cloning = True + self.user_setting() + + def user_setting(self): + if self.listener.upDest.startswith("mtp:") or self.listener.link.startswith( + "mtp:" + ): + self.token_path = f"tokens/{self.listener.userId}.pickle" + self.listener.upDest = self.listener.upDest.replace("mtp:", "", 1) + self.use_sa = False + elif self.listener.upDest.startswith("tp:"): + self.listener.upDest = self.listener.upDest.replace("tp:", "", 1) + self.use_sa = False + elif self.listener.upDest.startswith("sa:") or self.listener.link.startswith( + "sa:" + ): + self.listener.upDest = self.listener.upDest.replace("sa:", "", 1) + self.use_sa = True + + def clone(self): + try: + file_id = self.getIdFromUrl(self.listener.link) + except (KeyError, IndexError): + return ( + "Google Drive ID could not be found in the provided link", + None, + None, + None, + None, + ) + self.service = self.authorize() + msg = "" + LOGGER.info(f"File ID: {file_id}") + try: + meta = self.getFileMetadata(file_id) + mime_type = meta.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + dir_id = self.create_directory( + meta.get("name"), self.listener.upDest + ) + self._cloneFolder(meta.get("name"), meta.get("id"), dir_id) + durl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) + if self.listener.isCancelled: + LOGGER.info("Deleting cloned data from Drive...") + self.service.files().delete( + 
fileId=dir_id, supportsAllDrives=True
+                    ).execute()
+                    return None, None, None, None, None
+                mime_type = "Folder"
+                self.listener.size = self.proc_bytes
+            else:
+                file = self._copyFile(meta.get("id"), self.listener.upDest)
+                msg += f'Name: {file.get("name")}'
+                durl = self.G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))
+                if mime_type is None:
+                    mime_type = "File"
+                self.listener.size = int(meta.get("size", 0))
+            return (
+                durl,
+                mime_type,
+                self.total_files,
+                self.total_folders,
+                self.getIdFromUrl(durl),
+            )
+        except Exception as err:
+            if isinstance(err, RetryError):
+                LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+                err = err.last_attempt.exception()
+            err = str(err).replace(">", "").replace("<", "")
+            if "User rate limit exceeded" in err:
+                msg = "User rate limit exceeded."
+            elif "File not found" in err:
+                if not self.alt_auth and self.use_sa:
+                    self.alt_auth = True
+                    self.use_sa = False
+                    LOGGER.error("File not found. Trying with token.pickle...")
+                    return self.clone()
+                msg = "File not found."
+            else:
+                msg = f"Error.\n{err}"
+            async_to_sync(self.listener.onUploadError, msg)
+            return None, None, None, None, None
+
+    def _cloneFolder(self, folder_name, folder_id, dest_id):
+        LOGGER.info(f"Syncing: {folder_name}")
+        files = self.getFilesByFolderId(folder_id)
+        if len(files) == 0:
+            return dest_id
+        for file in files:
+            if file.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE:
+                self.total_folders += 1
+                file_path = ospath.join(folder_name, file.get("name"))
+                current_dir_id = self.create_directory(file.get("name"), dest_id)
+                self._cloneFolder(file_path, file.get("id"), current_dir_id)
+            elif (
+                not file.get("name")
+                .lower()
+                .endswith(tuple(self.listener.extensionFilter))
+            ):
+                self.total_files += 1
+                self._copyFile(file.get("id"), dest_id)
+                self.proc_bytes += int(file.get("size", 0))
+                self.total_time = int(time() - self._start_time)
+            if self.listener.isCancelled:
+                break
+        return None
+
+    @retry(
+        wait=wait_exponential(multiplier=2, min=3, max=6),
+        stop=stop_after_attempt(3),
+        retry=retry_if_exception_type(Exception),
+    )
+    def _copyFile(self, file_id, dest_id):
+        body = {"parents": [dest_id]}
+        try:
+            return (
+                self.service.files()
+                .copy(fileId=file_id, body=body, supportsAllDrives=True)
+                .execute()
+            )
+        except HttpError as err:
+            if err.resp.get("content-type", "").startswith("application/json"):
+                reason = (
+                    eval(err.content).get("error").get("errors")[0].get("reason")
+                )
+                if reason not in [
+                    "userRateLimitExceeded",
+                    "dailyLimitExceeded",
+                    "cannotCopyFile",
+                ]:
+                    raise err
+                if reason == "cannotCopyFile":
+                    LOGGER.error(err)
+                elif self.use_sa:
+                    if self.sa_count >= self.sa_number:
+                        LOGGER.info(
+                            f"Reached maximum number of service accounts switching, which is {self.sa_count}"
+                        )
+                        raise err
+                    if self.listener.isCancelled:
+                        return None
+                    self.switchServiceAccount()
+                    return self._copyFile(file_id, dest_id)
+                else:
+                    LOGGER.error(f"Got: {reason}")
+                    raise err
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/count.py b/bot/helper/mirror_leech_utils/gdrive_utils/count.py
new file mode 100644
index 000000000..fc5d6b666
--- /dev/null
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/count.py
@@ -0,0 +1,81 @@
+from logging import getLogger
+
+from tenacity import RetryError
+
+from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper
+
+LOGGER = getLogger(__name__)
+
+
+class gdCount(GoogleDriveHelper):
+    def __init__(self):
+        super().__init__()
+
+    def count(self, link, user_id):
+        try:
+            file_id = self.getIdFromUrl(link, user_id)
+        except (KeyError, IndexError):
+            return (
+                "Google Drive ID could not be found in the provided link",
+                None,
+                None,
+                None,
+                None,
+            )
+        self.service = self.authorize()
+        LOGGER.info(f"File ID: {file_id}")
+        try:
+            return 
self._proceed_count(file_id) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + if "File not found" in err: + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + return self.count(link, user_id) + msg = "File not found." + else: + msg = f"Error.\n{err}" + return msg, None, None, None, None + + def _proceed_count(self, file_id): + meta = self.getFileMetadata(file_id) + name = meta["name"] + LOGGER.info(f"Counting: {name}") + mime_type = meta.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self._gDrive_directory(meta) + mime_type = "Folder" + else: + if mime_type is None: + mime_type = "File" + self.total_files += 1 + self._gDrive_file(meta) + return name, mime_type, self.proc_bytes, self.total_files, self.total_folders + + def _gDrive_file(self, filee): + size = int(filee.get("size", 0)) + self.proc_bytes += size + + def _gDrive_directory(self, drive_folder): + files = self.getFilesByFolderId(drive_folder["id"]) + if len(files) == 0: + return + for filee in files: + shortcut_details = filee.get("shortcutDetails") + if shortcut_details is not None: + mime_type = shortcut_details["targetMimeType"] + file_id = shortcut_details["targetId"] + filee = self.getFileMetadata(file_id) + else: + mime_type = filee.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self.total_folders += 1 + self._gDrive_directory(filee) + else: + self.total_files += 1 + self._gDrive_file(filee) diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/delete.py b/bot/helper/mirror_leech_utils/gdrive_utils/delete.py new file mode 100644 index 000000000..2dc1b3a0e --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/delete.py @@ -0,0 +1,39 @@ +from logging import getLogger + +from googleapiclient.errors import HttpError + +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class gdDelete(GoogleDriveHelper): + def __init__(self): + super().__init__() + + def deletefile(self, link, user_id): + try: + file_id = self.getIdFromUrl(link, user_id) + except (KeyError, IndexError): + return "Google Drive ID could not be found in the provided link" + self.service = self.authorize() + msg = "" + try: + self.service.files().delete( + fileId=file_id, supportsAllDrives=True + ).execute() + msg = "Successfully deleted" + LOGGER.info(f"Delete Result: {msg}") + except HttpError as err: + if "File not found" in str(err) or "insufficientFilePermissions" in str( + err + ): + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + return self.deletefile(link, user_id) + err = "File not found or insufficientFilePermissions!" 
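+            # Whether err was rewritten above or kept as the raw HttpError,
+            # it is logged once here and returned to the caller as plain text.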
+ LOGGER.error(f"Delete Result: {err}") + msg = str(err) + return msg diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/download.py b/bot/helper/mirror_leech_utils/gdrive_utils/download.py new file mode 100644 index 000000000..c18f26f11 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/download.py @@ -0,0 +1,164 @@ +from io import FileIO +from os import path as ospath +from os import makedirs +from logging import getLogger + +from tenacity import ( + RetryError, + retry, + wait_exponential, + stop_after_attempt, + retry_if_exception_type, +) +from googleapiclient.http import MediaIoBaseDownload +from googleapiclient.errors import HttpError + +from bot.helper.ext_utils.bot_utils import setInterval, async_to_sync +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class gdDownload(GoogleDriveHelper): + def __init__(self, listener, path): + self.listener = listener + self._updater = None + self._path = path + super().__init__() + self.is_downloading = True + + def download(self): + file_id = self.getIdFromUrl(self.listener.link, self.listener.userId) + self.service = self.authorize() + self._updater = setInterval(self.update_interval, self.progress) + try: + meta = self.getFileMetadata(file_id) + if meta.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE: + self._download_folder(file_id, self._path, self.listener.name) + else: + makedirs(self._path, exist_ok=True) + self._download_file( + file_id, self._path, self.listener.name, meta.get("mimeType") + ) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + if "downloadQuotaExceeded" in err: + err = "Download Quota Exceeded." + elif "File not found" in err: + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + self._updater.cancel() + return self.download() + err = "File not found!" 
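+            # async_to_sync bridges this synchronous download path back into
+            # the bot's event loop so the listener coroutine can report the
+            # failure before the task is flagged as cancelled.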
+ async_to_sync(self.listener.onDownloadError, err) + self.listener.isCancelled = True + finally: + self._updater.cancel() + if self.listener.isCancelled: + return None + async_to_sync(self.listener.on_download_complete) + + def _download_folder(self, folder_id, path, folder_name): + folder_name = folder_name.replace("/", "") + if not ospath.exists(f"{path}/{folder_name}"): + makedirs(f"{path}/{folder_name}") + path += f"/{folder_name}" + result = self.getFilesByFolderId(folder_id) + if len(result) == 0: + return + result = sorted(result, key=lambda k: k["name"]) + for item in result: + file_id = item["id"] + filename = item["name"] + shortcut_details = item.get("shortcutDetails") + if shortcut_details is not None: + file_id = shortcut_details["targetId"] + mime_type = shortcut_details["targetMimeType"] + else: + mime_type = item.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self._download_folder(file_id, path, filename) + elif not ospath.isfile( + f"{path}{filename}" + ) and not filename.lower().endswith( + tuple(self.listener.extensionFilter) + ): + self._download_file(file_id, path, filename, mime_type) + if self.listener.isCancelled: + break + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(Exception)), + ) + def _download_file(self, file_id, path, filename, mime_type, export=False): + if export: + request = self.service.files().export_media( + fileId=file_id, mimeType="application/pdf" + ) + else: + request = self.service.files().get_media( + fileId=file_id, supportsAllDrives=True, acknowledgeAbuse=True + ) + filename = filename.replace("/", "") + if export: + filename = f"{filename}.pdf" + if len(filename.encode()) > 255: + ext = ospath.splitext(filename)[1] + filename = f"{filename[:245]}{ext}" + + if self.listener.name.endswith(ext): + self.listener.name = filename + if self.listener.isCancelled: + return None + fh = FileIO(f"{path}/{filename}", "wb") + downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024) + done = False + retries = 0 + while not done: + if self.listener.isCancelled: + fh.close() + break + try: + self.status, done = downloader.next_chunk() + except HttpError as err: + LOGGER.error(err) + if err.resp.status in [500, 502, 503, 504, 429] and retries < 10: + retries += 1 + continue + if err.resp.get("content-type", "").startswith("application/json"): + reason = ( + eval(err.content).get("error").get("errors")[0].get("reason") + ) + if "fileNotDownloadable" in reason and "document" in mime_type: + return self._download_file( + file_id, path, filename, mime_type, True + ) + if reason not in [ + "downloadQuotaExceeded", + "dailyLimitExceeded", + ]: + raise err + if self.use_sa: + if self.sa_count >= self.sa_number: + LOGGER.info( + f"Reached maximum number of service accounts switching, which is {self.sa_count}" + ) + raise err + if self.listener.isCancelled: + return None + self.switchServiceAccount() + LOGGER.info(f"Got: {reason}, Trying Again...") + return self._download_file( + file_id, path, filename, mime_type + ) + LOGGER.error(f"Got: {reason}") + raise err + self.file_processed_bytes = 0 + return None diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/helper.py b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py new file mode 100644 index 000000000..8ceee5dcc --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py @@ -0,0 +1,266 @@ +from os import path as ospath +from os import listdir +from re import search as re_search 
+from pickle import load as pload +from random import randrange +from logging import ERROR, getLogger +from urllib.parse import parse_qs, urlparse + +from tenacity import ( + retry, + wait_exponential, + stop_after_attempt, + retry_if_exception_type, +) +from google.oauth2 import service_account +from google_auth_httplib2 import AuthorizedHttp +from googleapiclient.http import build_http +from googleapiclient.discovery import build + +from bot import config_dict +from bot.helper.ext_utils.links_utils import is_gdrive_id + +LOGGER = getLogger(__name__) +getLogger("googleapiclient.discovery").setLevel(ERROR) + + +class GoogleDriveHelper: + def __init__(self): + self._OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] + self.token_path = "token.pickle" + self.G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder" + self.G_DRIVE_BASE_DOWNLOAD_URL = ( + "https://drive.google.com/uc?id={}&export=download" + ) + self.G_DRIVE_DIR_BASE_DOWNLOAD_URL = ( + "https://drive.google.com/drive/folders/{}" + ) + self.is_uploading = False + self.is_downloading = False + self.is_cloning = False + self.sa_index = 0 + self.sa_count = 1 + self.sa_number = 100 + self.alt_auth = False + self.service = None + self.total_files = 0 + self.total_folders = 0 + self.file_processed_bytes = 0 + self.proc_bytes = 0 + self.total_time = 0 + self.status = None + self.update_interval = 3 + self.use_sa = config_dict["USE_SA"] + + @property + def speed(self): + try: + return self.proc_bytes / self.total_time + except Exception: + return 0 + + @property + def processed_bytes(self): + return self.proc_bytes + + async def progress(self): + if self.status is not None: + chunk_size = ( + self.status.total_size * self.status.progress() + - self.file_processed_bytes + ) + self.file_processed_bytes = ( + self.status.total_size * self.status.progress() + ) + self.proc_bytes += chunk_size + self.total_time += self.update_interval + + def authorize(self): + credentials = None + if self.use_sa: + json_files = listdir("accounts") + self.sa_number = len(json_files) + self.sa_index = randrange(self.sa_number) + LOGGER.info( + f"Authorizing with {json_files[self.sa_index]} service account" + ) + credentials = service_account.Credentials.from_service_account_file( + f"accounts/{json_files[self.sa_index]}", scopes=self._OAUTH_SCOPE + ) + elif ospath.exists(self.token_path): + LOGGER.info(f"Authorize with {self.token_path}") + with open(self.token_path, "rb") as f: + credentials = pload(f) + else: + LOGGER.error("token.pickle not found!") + authorized_http = AuthorizedHttp(credentials, http=build_http()) + authorized_http.http.disable_ssl_certificate_validation = True + return build("drive", "v3", http=authorized_http, cache_discovery=False) + + def switchServiceAccount(self): + if self.sa_index == self.sa_number - 1: + self.sa_index = 0 + else: + self.sa_index += 1 + self.sa_count += 1 + LOGGER.info(f"Switching to {self.sa_index} index") + self.service = self.authorize() + + def getIdFromUrl(self, link, user_id=""): + if user_id and link.startswith("mtp:"): + self.use_sa = False + self.token_path = f"tokens/{user_id}.pickle" + link = link.replace("mtp:", "", 1) + elif link.startswith("sa:"): + self.use_sa = True + link = link.replace("sa:", "", 1) + elif link.startswith("tp:"): + self.use_sa = False + link = link.replace("tp:", "", 1) + if is_gdrive_id(link): + return link + if "folders" in link or "file" in link: + regex = r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)" + res = re_search(regex, link) 
+ if res is None: + raise IndexError("G-Drive ID not found.") + return res.group(3) + parsed = urlparse(link) + return parse_qs(parsed.query)["id"][0] + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def set_permission(self, file_id): + permissions = { + "role": "reader", + "type": "anyone", + "value": None, + "withLink": True, + } + return ( + self.service.permissions() + .create(fileId=file_id, body=permissions, supportsAllDrives=True) + .execute() + ) + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def getFileMetadata(self, file_id): + return ( + self.service.files() + .get( + fileId=file_id, + supportsAllDrives=True, + fields="name, id, mimeType, size", + ) + .execute() + ) + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def getFilesByFolderId(self, folder_id, item_type=""): + page_token = None + files = [] + if not item_type: + q = f"'{folder_id}' in parents and trashed = false" + elif item_type == "folders": + q = f"'{folder_id}' in parents and mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and trashed = false" + else: + q = f"'{folder_id}' in parents and mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and trashed = false" + while True: + response = ( + self.service.files() + .list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + q=q, + spaces="drive", + pageSize=200, + fields="nextPageToken, files(id, name, mimeType, size, shortcutDetails)", + orderBy="folder, name", + pageToken=page_token, + ) + .execute() + ) + files.extend(response.get("files", [])) + page_token = response.get("nextPageToken") + if page_token is None: + break + return files + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def create_directory(self, directory_name, dest_id): + file_metadata = { + "name": directory_name, + "description": "Uploaded by @ProjectAeon", + "mimeType": self.G_DRIVE_DIR_MIME_TYPE, + } + if dest_id is not None: + file_metadata["parents"] = [dest_id] + file = ( + self.service.files() + .create(body=file_metadata, supportsAllDrives=True) + .execute() + ) + file_id = file.get("id") + if not config_dict["IS_TEAM_DRIVE"]: + self.set_permission(file_id) + LOGGER.info( + f'Created G-Drive Folder:\nName: {file.get("name")}\nID: {file_id}' + ) + return file_id + + def escapes(self, estr): + chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"] + for char in chars: + estr = estr.replace(char, f"\\{char}") + return estr.strip() + + """ + def get_recursive_list(self, file, rootId): + rtnlist = [] + if not rootId: + rootId = file.get('teamDriveId') + if rootId == "root": + rootId = self.service.files().get( + fileId='root', fields='id').execute().get('id') + x = file.get("name") + y = file.get("id") + while (y != rootId): + rtnlist.append(x) + file = self.service.files().get(fileId=file.get("parents")[0], supportsAllDrives=True, + fields='id, name, parents').execute() + x = file.get("name") + y = file.get("id") + rtnlist.reverse() + return rtnlist + """ + + async def cancel_task(self): + self.listener.isCancelled = True + if self.is_downloading: + LOGGER.info(f"Cancelling Download: {self.listener.name}") + await self.listener.onDownloadError("Download stopped by user!") + elif self.is_cloning: + 
LOGGER.info(f"Cancelling Clone: {self.listener.name}") + await self.listener.onUploadError( + "your clone has been stopped and cloned data has been deleted!" + ) + elif self.is_uploading: + LOGGER.info(f"Cancelling Upload: {self.listener.name}") + await self.listener.onUploadError( + "your upload has been stopped and uploaded data has been deleted!" + ) diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/list.py b/bot/helper/mirror_leech_utils/gdrive_utils/list.py new file mode 100644 index 000000000..73b9acb0c --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/list.py @@ -0,0 +1,382 @@ +from time import time +from asyncio import Event, gather, wait_for, wrap_future +from logging import getLogger +from functools import partial + +from natsort import natsorted +from tenacity import RetryError +from aiofiles.os import path as aiopath +from pyrogram.filters import user, regex +from pyrogram.handlers import CallbackQueryHandler + +from bot import config_dict +from bot.helper.ext_utils.bot_utils import new_task, new_thread, update_user_ldata +from bot.helper.ext_utils.db_handler import Database +from bot.helper.ext_utils.status_utils import ( + get_readable_time, + get_readable_file_size, +) +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + edit_message, + send_message, + delete_message, +) +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + +LIST_LIMIT = 6 + + +@new_task +async def id_updates(_, query, obj): + await query.answer() + message = query.message + data = query.data.split() + if data[1] == "cancel": + obj.id = "Task has been cancelled!" + obj.listener.isCancelled = True + obj.event.set() + await delete_message(message) + return + if obj.query_proc: + return + obj.query_proc = True + if data[1] == "pre": + obj.iter_start -= LIST_LIMIT * obj.page_step + await obj.get_items_buttons() + elif data[1] == "nex": + obj.iter_start += LIST_LIMIT * obj.page_step + await obj.get_items_buttons() + elif data[1] == "back": + if data[2] == "dr": + await obj.choose_token() + else: + await obj.get_pevious_id() + elif data[1] == "dr": + index = int(data[2]) + i = obj.drives[index] + obj.id = i["id"] + obj.parents = [{"id": i["id"], "name": i["name"]}] + await obj.get_items() + elif data[1] == "pa": + index = int(data[3]) + i = obj.items_list[index] + obj.id = i["id"] + if data[2] == "fo": + obj.parents.append({"id": i["id"], "name": i["name"]}) + await obj.get_items() + else: + await delete_message(message) + obj.event.set() + elif data[1] == "ps": + if obj.page_step == int(data[2]): + return + obj.page_step = int(data[2]) + await obj.get_items_buttons() + elif data[1] == "root": + obj.id = obj.parents[0]["id"] + obj.parents = [obj.parents[0]] + await obj.get_items() + elif data[1] == "itype": + obj.item_type = data[2] + await obj.get_items() + elif data[1] == "cur": + await delete_message(message) + obj.event.set() + elif data[1] == "def": + if obj.token_path != obj.user_token_path: + id_ = f"sa:{obj.id}" if obj.use_sa else f"tp:{obj.id}" + else: + id_ = f"mtp:{obj.id}" + if id_ != obj.listener.userDict.get("gdrive_id"): + update_user_ldata(obj.listener.userId, "gdrive_id", id_) + await obj.get_items_buttons() + await Database().update_user_data(obj.listener.userId) + elif data[1] == "owner": + obj.token_path = "token.pickle" + obj.use_sa = False + obj.id = "" + obj.parents = [] + await obj.list_drives() + elif data[1] == "user": + obj.token_path = 
obj.user_token_path + obj.use_sa = False + obj.id = "" + obj.parents = [] + await obj.list_drives() + elif data[1] == "sa": + obj.token_path = "accounts" + obj.use_sa = True + obj.id = "" + obj.parents = [] + await obj.list_drives() + obj.query_proc = False + + +class gdriveList(GoogleDriveHelper): + def __init__(self, listener): + self.listener = listener + self._token_user = False + self._token_owner = False + self._sa_owner = False + self._reply_to = None + self._time = time() + self._timeout = 240 + self.drives = [] + self.query_proc = False + self.item_type = "folders" + self.event = Event() + self.user_token_path = f"tokens/{self.listener.userId}.pickle" + self.id = "" + self.parents = [] + self.list_status = "" + self.items_list = [] + self.iter_start = 0 + self.page_step = 1 + super().__init__() + + @new_thread + async def _event_handler(self): + pfunc = partial(id_updates, obj=self) + handler = self.listener.client.add_handler( + CallbackQueryHandler( + pfunc, filters=regex("^gdq") & user(self.listener.userId) + ), + group=-1, + ) + try: + await wait_for(self.event.wait(), timeout=self._timeout) + except Exception: + self.id = "Timed Out. Task has been cancelled!" + self.listener.isCancelled = True + self.event.set() + finally: + self.listener.client.remove_handler(*handler) + + async def _send_list_message(self, msg, button): + if not self.listener.isCancelled: + if self._reply_to is None: + self._reply_to = await send_message( + self.listener.message, msg, button + ) + else: + await edit_message(self._reply_to, msg, button) + + async def get_items_buttons(self): + items_no = len(self.items_list) + pages = (items_no + LIST_LIMIT - 1) // LIST_LIMIT + if items_no <= self.iter_start: + self.iter_start = 0 + elif self.iter_start < 0 or self.iter_start > items_no: + self.iter_start = LIST_LIMIT * (pages - 1) + page = (self.iter_start / LIST_LIMIT) + 1 if self.iter_start != 0 else 1 + buttons = ButtonMaker() + for index, item in enumerate( + self.items_list[self.iter_start : LIST_LIMIT + self.iter_start] + ): + orig_index = index + self.iter_start + if item["mimeType"] == self.G_DRIVE_DIR_MIME_TYPE: + ptype = "fo" + name = item["name"] + else: + ptype = "fi" + name = ( + f"[{get_readable_file_size(float(item['size']))}] {item['name']}" + ) + buttons.callback(name, f"gdq pa {ptype} {orig_index}") + if items_no > LIST_LIMIT: + for i in [1, 2, 4, 6, 10, 30, 50, 100]: + buttons.callback(i, f"gdq ps {i}", position="header") + buttons.callback("Previous", "gdq pre", position="footer") + buttons.callback("Next", "gdq nex", position="footer") + if self.list_status == "gdd": + if self.item_type == "folders": + buttons.callback("Files", "gdq itype files", position="footer") + else: + buttons.callback("Folders", "gdq itype folders", position="footer") + if self.list_status == "gdu" or len(self.items_list) > 0: + buttons.callback("Choose Current Path", "gdq cur", position="footer") + if self.list_status == "gdu": + buttons.callback("Set as Default Path", "gdq def", position="footer") + if ( + len(self.parents) > 1 + and len(self.drives) > 1 + or self._token_user + and self._token_owner + ): + buttons.callback("Back", "gdq back pa", position="footer") + if len(self.parents) > 1: + buttons.callback("Back To Root", "gdq root", position="footer") + buttons.callback("Cancel", "gdq cancel", position="footer") + button = buttons.menu(f_cols=2) + msg = "Choose Path:" + ( + "\nTransfer Type: Download" + if self.list_status == "gdd" + else "\nTransfer Type: Upload" + ) + if self.list_status == "gdu": + 
default_id = (
+                self.listener.userDict.get("gdrive_id") or config_dict["GDRIVE_ID"]
+            )
+            msg += f"\nDefault Gdrive ID: {default_id}" if default_id else ""
+        msg += f"\n\nItems: {items_no}"
+        if items_no > LIST_LIMIT:
+            msg += f" | Page: {int(page)}/{pages} | Page Step: {self.page_step}"
+        msg += f"\n\nItem Type: {self.item_type}\nToken Path: {self.token_path}"
+        msg += f"\n\nCurrent ID: {self.id}"
+        msg += f"\nCurrent Path: {('/').join(i['name'] for i in self.parents)}"
+        msg += (
+            f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        )
+        await self._send_list_message(msg, button)
+
+    async def get_items(self, itype=""):
+        if itype:
+            self.item_type = itype
+        elif self.list_status == "gdu":
+            self.item_type = "folders"
+        try:
+            files = self.getFilesByFolderId(self.id, self.item_type)
+            if self.listener.isCancelled:
+                return None
+        except Exception as err:
+            if isinstance(err, RetryError):
+                LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}")
+                err = err.last_attempt.exception()
+            self.id = str(err).replace(">", "").replace("<", "")
+            self.event.set()
+            return None
+        if len(files) == 0 and itype != self.item_type and self.list_status == "gdd":
+            itype = "folders" if self.item_type == "files" else "files"
+            self.item_type = itype
+            return await self.get_items(itype)
+        self.items_list = natsorted(files)
+        self.iter_start = 0
+        await self.get_items_buttons()
+        return None
+
+    async def list_drives(self):
+        self.service = self.authorize()
+        try:
+            result = self.service.drives().list(pageSize="100").execute()
+        except Exception as e:
+            self.id = str(e)
+            self.event.set()
+            return
+        drives = result["drives"]
+        if len(drives) == 0 and not self.use_sa:
+            self.drives = [{"id": "root", "name": "root"}]
+            self.parents = [{"id": "root", "name": "root"}]
+            self.id = "root"
+            await self.get_items()
+        elif len(drives) == 0:
+            msg = "Service accounts don't have access to any drive!"
+            buttons = ButtonMaker()
+            if self._token_user and self._token_owner:
+                buttons.callback("Back", "gdq back dr", position="footer")
+            buttons.callback("Cancel", "gdq cancel", position="footer")
+            button = buttons.menu(2)
+            await self._send_list_message(msg, button)
+        elif self.use_sa and len(drives) == 1:
+            self.id = drives[0]["id"]
+            self.drives = [{"id": self.id, "name": drives[0]["name"]}]
+            self.parents = [{"id": self.id, "name": drives[0]["name"]}]
+            await self.get_items()
+        else:
+            msg = "Choose Drive:" + (
+                "\nTransfer Type: Download"
+                if self.list_status == "gdd"
+                else "\nTransfer Type: Upload"
+            )
+            msg += f"\nToken Path: {self.token_path}"
+            msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+            buttons = ButtonMaker()
+            self.drives.clear()
+            self.parents.clear()
+            if not self.use_sa:
+                buttons.callback("root", "gdq dr 0")
+                self.drives = [{"id": "root", "name": "root"}]
+            for index, item in enumerate(drives, start=1):
+                self.drives.append({"id": item["id"], "name": item["name"]})
+                buttons.callback(item["name"], f"gdq dr {index}")
+            if self._token_user and self._token_owner:
+                buttons.callback("Back", "gdq back dr", position="footer")
+            buttons.callback("Cancel", "gdq cancel", position="footer")
+            button = buttons.menu(2)
+            await self._send_list_message(msg, button)
+
+    async def choose_token(self):
+        if (
+            self._token_user
+            and self._token_owner
+            or self._sa_owner
+            and self._token_owner
+            or self._sa_owner
+            and self._token_user
+        ):
+            msg = "Choose Token:" + (
+                "\nTransfer Type: Download"
+                if self.list_status == "gdd"
+                else "\nTransfer Type: Upload"
+            )
+            msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+            buttons = ButtonMaker()
+            if self._token_owner:
+                buttons.callback("Owner Token", "gdq owner")
+            if self._sa_owner:
+                buttons.callback("Service Accounts", "gdq sa")
+            if self._token_user:
+                buttons.callback("My Token", "gdq user")
+            buttons.callback("Cancel", "gdq cancel")
+            button = buttons.menu(2)
+            await self._send_list_message(msg, button)
+        else:
+            if 
self._token_owner: + self.token_path = "token.pickle" + self.use_sa = False + elif self._token_user: + self.token_path = self.user_token_path + self.use_sa = False + else: + self.token_path = "accounts" + self.use_sa = True + await self.list_drives() + + async def get_pevious_id(self): + if self.parents: + self.parents.pop() + if self.parents: + self.id = self.parents[-1]["id"] + await self.get_items() + else: + await self.list_drives() + else: + await self.list_drives() + + async def get_target_id(self, status, token_path=None): + self.list_status = status + future = self._event_handler() + if token_path is None: + self._token_user, self._token_owner, self._sa_owner = await gather( + aiopath.exists(self.user_token_path), + aiopath.exists("token.pickle"), + aiopath.exists("accounts"), + ) + if not self._token_owner and not self._token_user and not self._sa_owner: + self.event.set() + return "token.pickle or service accounts are not Exists!" + await self.choose_token() + else: + self.token_path = token_path + self.use_sa = self.token_path == "accounts" + await self.list_drives() + await wrap_future(future) + if self._reply_to: + await delete_message(self._reply_to) + if not self.listener.isCancelled: + if self.token_path == self.user_token_path: + return f"mtp:{self.id}" + return f"sa:{self.id}" if self.use_sa else f"tp:{self.id}" + return self.id diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/search.py b/bot/helper/mirror_leech_utils/gdrive_utils/search.py new file mode 100644 index 000000000..83ba5c604 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/search.py @@ -0,0 +1,183 @@ +from logging import getLogger + +from bot import DRIVES_IDS, INDEX_URLS, DRIVES_NAMES, user_data +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class gdSearch(GoogleDriveHelper): + def __init__(self, stopDup=False, noMulti=False, is_recursive=True, itemType=""): + super().__init__() + self._stopDup = stopDup + self._noMulti = noMulti + self._is_recursive = is_recursive + self._itemType = itemType + + def _drive_query(self, dirId, fileName, is_recursive): + try: + if is_recursive: + if self._stopDup: + query = f"name = '{fileName}' and " + else: + fileName = fileName.split() + query = "".join( + f"name contains '{name}' and " + for name in fileName + if name != "" + ) + if self._itemType == "files": + query += f"mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and " + elif self._itemType == "folders": + query += f"mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and " + query += "trashed = false" + if dirId == "root": + return ( + self.service.files() + .list( + q=f"{query} and 'me' in owners", + pageSize=200, + spaces="drive", + fields="files(id, name, mimeType, size, parents)", + orderBy="folder, name asc", + ) + .execute() + ) + return ( + self.service.files() + .list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + driveId=dirId, + q=query, + spaces="drive", + pageSize=150, + fields="files(id, name, mimeType, size, teamDriveId, parents)", + corpora="drive", + orderBy="folder, name asc", + ) + .execute() + ) + if self._stopDup: + query = f"'{dirId}' in parents and name = '{fileName}' and " + else: + query = f"'{dirId}' in parents and " + fileName = fileName.split() + for name in fileName: + if name != "": + query += f"name contains '{name}' and " + if self._itemType == "files": + query += f"mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and " + elif 
self._itemType == "folders":
+                    query += f"mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and "
+                query += "trashed = false"
+                return (
+                    self.service.files()
+                    .list(
+                        supportsAllDrives=True,
+                        includeItemsFromAllDrives=True,
+                        q=query,
+                        spaces="drive",
+                        pageSize=150,
+                        fields="files(id, name, mimeType, size)",
+                        orderBy="folder, name asc",
+                    )
+                    .execute()
+                )
+        except Exception as err:
+            err = str(err).replace(">", "").replace("<", "")
+            LOGGER.error(err)
+            return {"files": []}
+
+    def drive_list(self, fileName, target_id="", user_id=""):
+        msg = ""
+        fileName = self.escapes(str(fileName))
+        contents_no = 0
+        telegraph_content = []
+        Title = False
+
+        if target_id.startswith("mtp:"):
+            drives = self.get_user_drive(target_id, user_id)
+        elif target_id:
+            drives = [
+                (
+                    "From Owner",
+                    target_id.replace("tp:", "", 1),
+                    INDEX_URLS[0] if INDEX_URLS else "",
+                )
+            ]
+        else:
+            drives = zip(DRIVES_NAMES, DRIVES_IDS, INDEX_URLS)
+        if (
+            not target_id.startswith("mtp:")
+            and len(DRIVES_IDS) > 1
+            or target_id.startswith("tp:")
+        ):
+            self.use_sa = False
+
+        self.service = self.authorize()
+
+        for drive_name, dir_id, index_url in drives:
+            isRecur = (
+                False
+                if self._is_recursive and len(dir_id) > 23
+                else self._is_recursive
+            )
+            response = self._drive_query(dir_id, fileName, isRecur)
+            if not response["files"]:
+                if self._noMulti:
+                    break
+                continue
+            if not Title:
+                msg += f"<h4>Search Result For {fileName}</h4>"
+                Title = True
+            if drive_name:
+                msg += f"╾────────────╼<br><b>{drive_name}</b><br>╾────────────╼<br>"
+            for file in response.get("files", []):
+                mime_type = file.get("mimeType")
+                if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
+                    furl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(file.get("id"))
+                    msg += f"📁 <code>{file.get('name')}<br>(folder)</code><br>"
+                    msg += f"<b><a href='{furl}'>Drive Link</a></b>"
+                    if index_url:
+                        url = f'{index_url}findpath?id={file.get("id")}'
+                        msg += f' <b>| <a href="{url}">Index Link</a></b>'
+                elif mime_type == "application/vnd.google-apps.shortcut":
+                    furl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(file.get("id"))
+                    msg += (
+                        f"⁍ <a href='{furl}'>{file.get('name')}</a>"
+                        f" (shortcut)"
+                    )
+                else:
+                    furl = self.G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id"))
+                    msg += f"📄 <code>{file.get('name')}<br>({get_readable_file_size(int(file.get('size', 0)))})</code><br>"
+                    msg += f"<b><a href='{furl}'>Drive Link</a></b>"
+                    if index_url:
+                        url = f'{index_url}findpath?id={file.get("id")}'
+                        msg += f' <b>| <a href="{url}">Index Link</a></b>'
+                        if mime_type.startswith(("image", "video", "audio")):
+                            urlv = (
+                                f'{index_url}findpath?id={file.get("id")}&view=true'
+                            )
+                            msg += f' <b>| <a href="{urlv}">View Link</a></b>'
+                msg += "<br><br>"
+                contents_no += 1
+                if len(msg.encode("utf-8")) > 39000:
+                    telegraph_content.append(msg)
+                    msg = ""
+            if self._noMulti:
+                break
+
+        if msg != "":
+            telegraph_content.append(msg)
+
+        return telegraph_content, contents_no
+
+    def get_user_drive(self, target_id, user_id):
+        dest_id = target_id.replace("mtp:", "", 1)
+        self.token_path = f"tokens/{user_id}.pickle"
+        self.use_sa = False
+        user_dict = user_data.get(user_id, {})
+        INDEX = user_dict["index_url"] if user_dict.get("index_url") else ""
+        return [("User Choice", dest_id, INDEX)]
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/upload.py b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py
new file mode 100644
index 000000000..b3796069c
--- /dev/null
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py
@@ -0,0 +1,243 @@
+from os import path as ospath
+from os import remove, listdir
+from logging import getLogger
+from contextlib import suppress
+
+from tenacity import (
+    RetryError,
+    retry,
+    wait_exponential,
+    stop_after_attempt,
+    retry_if_exception_type,
+)
+from googleapiclient.http import MediaFileUpload
+from googleapiclient.errors import HttpError
+
+from bot import config_dict
+from bot.helper.ext_utils.bot_utils import setInterval, async_to_sync
+from bot.helper.ext_utils.files_utils import get_mime_type
+from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper
+
+LOGGER = getLogger(__name__)
+
+
+class gdUpload(GoogleDriveHelper):
+    def __init__(self, listener, path):
+        self.listener = listener
+        self._updater = None
+        self._path = path
+        self._is_errored = False
+        super().__init__()
+        self.is_uploading = True
+
+    def user_setting(self):
+        if self.listener.upDest.startswith("mtp:"):
+            self.token_path = f"tokens/{self.listener.userId}.pickle"
+            self.listener.upDest = self.listener.upDest.replace("mtp:", "", 1)
+            self.use_sa = False
+        elif self.listener.upDest.startswith("tp:"):
+            self.listener.upDest = self.listener.upDest.replace("tp:", "", 1)
+            self.use_sa = False
+        elif self.listener.upDest.startswith("sa:"):
+            self.listener.upDest = self.listener.upDest.replace("sa:", "", 1)
+            self.use_sa = True
+
+    def upload(self, unwanted_files, ft_delete):
+        self.user_setting()
+        self.service = self.authorize()
+        LOGGER.info(f"Uploading: {self._path}")
+        self._updater = setInterval(self.update_interval, self.progress)
+        try:
+            if ospath.isfile(self._path):
+                if self._path.lower().endswith(tuple(self.listener.extensionFilter)):
+                    raise Exception(
+                        "This file extension is excluded by extension filter!"
+ ) + mime_type = get_mime_type(self._path) + link = self._upload_file( + self._path, + self.listener.name, + mime_type, + self.listener.upDest, + ft_delete, + in_dir=False, + ) + if self.listener.isCancelled: + return + if link is None: + raise Exception("Upload has been manually cancelled") + LOGGER.info(f"Uploaded To G-Drive: {self._path}") + else: + mime_type = "Folder" + dir_id = self.create_directory( + ospath.basename(ospath.abspath(self.listener.name)), + self.listener.upDest, + ) + result = self._upload_dir( + self._path, dir_id, unwanted_files, ft_delete + ) + if result is None: + raise Exception("Upload has been manually cancelled!") + link = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) + if self.listener.isCancelled: + return + LOGGER.info(f"Uploaded To G-Drive: {self.listener.name}") + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + async_to_sync(self.listener.onUploadError, err) + self._is_errored = True + finally: + self._updater.cancel() + if self.listener.isCancelled and not self._is_errored: + if mime_type == "Folder": + LOGGER.info("Deleting uploaded data from Drive...") + self.service.files().delete( + fileId=dir_id, supportsAllDrives=True + ).execute() + return + if self._is_errored: + return + async_to_sync( + self.listener.onUploadComplete, + link, + self.total_files, + self.total_folders, + mime_type, + dir_id=self.getIdFromUrl(link), + ) + + def _upload_dir(self, input_directory, dest_id, unwanted_files, ft_delete): + list_dirs = listdir(input_directory) + if len(list_dirs) == 0: + return dest_id + new_id = None + for item in list_dirs: + current_file_name = ospath.join(input_directory, item) + if ospath.isdir(current_file_name): + current_dir_id = self.create_directory(item, dest_id) + new_id = self._upload_dir( + current_file_name, current_dir_id, unwanted_files, ft_delete + ) + self.total_folders += 1 + elif ( + current_file_name not in unwanted_files + and not item.lower().endswith(tuple(self.listener.extensionFilter)) + ): + mime_type = get_mime_type(current_file_name) + file_name = current_file_name.split("/")[-1] + self._upload_file( + current_file_name, file_name, mime_type, dest_id, ft_delete + ) + self.total_files += 1 + new_id = dest_id + else: + if not self.listener.seed or self.listener.newDir: + remove(current_file_name) + new_id = "filter" + if self.listener.isCancelled: + break + return new_id + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(Exception)), + ) + def _upload_file( + self, file_path, file_name, mime_type, dest_id, ft_delete, in_dir=True + ): + file_metadata = { + "name": file_name, + "description": "Uploaded by @ProjectAeon", + "mimeType": mime_type, + } + if dest_id is not None: + file_metadata["parents"] = [dest_id] + + if ospath.getsize(file_path) == 0: + media_body = MediaFileUpload( + file_path, mimetype=mime_type, resumable=False + ) + response = ( + self.service.files() + .create( + body=file_metadata, media_body=media_body, supportsAllDrives=True + ) + .execute() + ) + if not config_dict["IS_TEAM_DRIVE"]: + self.set_permission(response["id"]) + + drive_file = ( + self.service.files() + .get(fileId=response["id"], supportsAllDrives=True) + .execute() + ) + return self.G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) + media_body = MediaFileUpload( + file_path, + 
mimetype=mime_type, + resumable=True, + chunksize=100 * 1024 * 1024, + ) + + drive_file = self.service.files().create( + body=file_metadata, media_body=media_body, supportsAllDrives=True + ) + response = None + retries = 0 + while response is None and not self.listener.isCancelled: + try: + self.status, response = drive_file.next_chunk() + except HttpError as err: + if err.resp.status in [500, 502, 503, 504, 429] and retries < 10: + retries += 1 + continue + if err.resp.get("content-type", "").startswith("application/json"): + reason = ( + eval(err.content).get("error").get("errors")[0].get("reason") + ) + if reason not in [ + "userRateLimitExceeded", + "dailyLimitExceeded", + ]: + raise err + if self.use_sa: + if self.sa_count >= self.sa_number: + LOGGER.info( + f"Reached maximum number of service accounts switching, which is {self.sa_count}" + ) + raise err + if self.listener.isCancelled: + return None + self.switchServiceAccount() + LOGGER.info(f"Got: {reason}, Trying Again.") + return self._upload_file( + file_path, + file_name, + mime_type, + dest_id, + ft_delete, + in_dir, + ) + LOGGER.error(f"Got: {reason}") + raise err + if self.listener.isCancelled: + return None + if not self.listener.seed or self.listener.newDir or file_path in ft_delete: + with suppress(Exception): + remove(file_path) + self.file_processed_bytes = 0 + if not config_dict["IS_TEAM_DRIVE"]: + self.set_permission(response["id"]) + if not in_dir: + drive_file = ( + self.service.files() + .get(fileId=response["id"], supportsAllDrives=True) + .execute() + ) + return self.G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) + return None diff --git a/bot/helper/mirror_leech_utils/rclone_utils/list.py b/bot/helper/mirror_leech_utils/rclone_utils/list.py index a3a6cce69..ef71826e7 100644 --- a/bot/helper/mirror_leech_utils/rclone_utils/list.py +++ b/bot/helper/mirror_leech_utils/rclone_utils/list.py @@ -1,6 +1,6 @@ from json import loads from time import time -from asyncio import Event, wait_for, wrap_future +from asyncio import Event, gather, wait_for, wrap_future from functools import partial from configparser import ConfigParser @@ -14,12 +14,19 @@ cmd_exec, new_task, new_thread, + update_user_ldata, +) +from bot.helper.ext_utils.db_handler import Database +from bot.helper.ext_utils.status_utils import ( get_readable_time, get_readable_file_size, ) -from bot.helper.ext_utils.db_handler import DbManager from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import edit_message, send_message +from bot.helper.telegram_helper.message_utils import ( + edit_message, + send_message, + delete_message, +) LIST_LIMIT = 6 @@ -32,9 +39,9 @@ async def path_updates(_, query, obj): if data[1] == "cancel": obj.remote = "Task has been cancelled!" 
obj.path = "" - obj.is_cancelled = True + obj.listener.isCancelled = True obj.event.set() - await message.delete() + await delete_message(message) return if obj.query_proc: return @@ -64,7 +71,7 @@ async def path_updates(_, query, obj): if data[2] == "fo": await obj.get_path() else: - await message.delete() + await delete_message(message) obj.event.set() elif data[1] == "ps": if obj.page_step == int(data[2]): @@ -78,21 +85,20 @@ async def path_updates(_, query, obj): obj.item_type = data[2] await obj.get_path() elif data[1] == "cur": - await message.delete() + await delete_message(message) obj.event.set() elif data[1] == "def": path = ( f"{obj.remote}{obj.path}" - if obj.config_path == "rcl.conf" + if obj.config_path == "rclone.conf" else f"mrcc:{obj.remote}{obj.path}" ) - if path != config_dict["RCLONE_PATH"]: - config_dict["RCLONE_PATH"] = path + if path != obj.listener.userDict.get("rclone_path"): + update_user_ldata(obj.listener.userId, "rclone_path", path) await obj.get_path_buttons() - if config_dict["DATABASE_URL"]: - await DbManager().update_config({"RCLONE_PATH": path}) + await Database().update_user_data(obj.listener.userId) elif data[1] == "owner": - obj.config_path = "rcl.conf" + obj.config_path = "rclone.conf" obj.path = "" obj.remote = "" await obj.list_remotes() @@ -105,22 +111,19 @@ async def path_updates(_, query, obj): class RcloneList: - def __init__(self, client, message): - self.__user_id = message.from_user.id - self.__rc_user = False - self.__rc_owner = False - self.__client = client - self.__message = message - self.__sections = [] - self.__reply_to = None - self.__time = time() - self.__timeout = 240 + def __init__(self, listener): + self._rc_user = False + self._rc_owner = False + self._sections = [] + self._reply_to = None + self._time = time() + self._timeout = 240 + self.listener = listener self.remote = "" - self.is_cancelled = False self.query_proc = False self.item_type = "--dirs-only" self.event = Event() - self.user_rcc_path = f"tanha/{self.__user_id}.conf" + self.user_rcc_path = f"rclone/{self.listener.userId}.conf" self.config_path = "" self.path = "" self.list_status = "" @@ -129,30 +132,32 @@ def __init__(self, client, message): self.page_step = 1 @new_thread - async def __event_handler(self): + async def _event_handler(self): pfunc = partial(path_updates, obj=self) - handler = self.__client.add_handler( + handler = self.listener.client.add_handler( CallbackQueryHandler( - pfunc, filters=regex("^rcq") & user(self.__user_id) + pfunc, filters=regex("^rcq") & user(self.listener.userId) ), group=-1, ) try: - await wait_for(self.event.wait(), timeout=self.__timeout) + await wait_for(self.event.wait(), timeout=self._timeout) except Exception: self.path = "" self.remote = "Timed Out. Task has been cancelled!" 
-            self.is_cancelled = True
+            self.listener.isCancelled = True
             self.event.set()
         finally:
-            self.__client.remove_handler(*handler)
+            self.listener.client.remove_handler(*handler)
 
-    async def __send_list_message(self, msg, button):
-        if not self.is_cancelled:
-            if self.__reply_to is None:
-                self.__reply_to = await send_message(self.__message, msg, button)
+    async def _send_list_message(self, msg, button):
+        if not self.listener.isCancelled:
+            if self._reply_to is None:
+                self._reply_to = await send_message(
+                    self.listener.message, msg, button
+                )
             else:
-                await edit_message(self.__reply_to, msg, button)
+                await edit_message(self._reply_to, msg, button)
 
     async def get_path_buttons(self):
         items_no = len(self.path_list)
@@ -192,21 +197,16 @@ async def get_path_buttons(self):
             buttons.callback("Choose Current Path", "rcq cur", position="footer")
         if self.list_status == "rcu":
             buttons.callback("Set as Default Path", "rcq def", position="footer")
-        if (
-            self.path
-            or len(self.__sections) > 1
-            or self.__rc_user
-            and self.__rc_owner
-        ):
+        if self.path or len(self._sections) > 1 or self._rc_user and self._rc_owner:
             buttons.callback("Back", "rcq back pa", position="footer")
         if self.path:
             buttons.callback("Back To Root", "rcq root", position="footer")
         buttons.callback("Cancel", "rcq cancel", position="footer")
-        button = buttons.column(2)
+        button = buttons.menu(f_cols=2)
         msg = "Choose Path:" + (
-            "\nTransfer Type: Download"
+            "\nTransfer Type: Download"
             if self.list_status == "rcd"
-            else "\nTransfer Type: Upload"
+            else "\nTransfer Type: Upload"
         )
         if self.list_status == "rcu":
             default_path = config_dict["RCLONE_PATH"]
@@ -216,8 +216,10 @@ async def get_path_buttons(self):
             msg += f" | Page: {int(page)}/{pages} | Page Step: {self.page_step}"
         msg += f"\n\nItem Type: {self.item_type}\nConfig Path: {self.config_path}"
         msg += f"\nCurrent Path: {self.remote}{self.path}"
-        msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}"
-        await self.__send_list_message(msg, button)
+        msg += (
+            f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        )
+        await self._send_list_message(msg, button)
 
     async def get_path(self, itype=""):
         if itype:
@@ -235,10 +237,12 @@ async def get_path(self, itype=""):
                 self.config_path,
                 f"{self.remote}{self.path}",
             ]
-        if self.is_cancelled:
+        if self.listener.isCancelled:
             return None
         res, err, code = await cmd_exec(cmd)
         if code not in [0, -9]:
+            if not err:
+                err = "Use /shell cat rlog.txt to see more information"
             LOGGER.error(
                 f"While rclone listing. Path: {self.remote}{self.path}. Stderr: {err}"
             )
@@ -269,43 +273,45 @@ async def list_remotes(self):
         config.read_string(contents)
         if config.has_section("combine"):
             config.remove_section("combine")
-        self.__sections = config.sections()
-        if len(self.__sections) == 1:
-            self.remote = f"{self.__sections[0]}:"
+        self._sections = config.sections()
+        if len(self._sections) == 1:
+            self.remote = f"{self._sections[0]}:"
             await self.get_path()
         else:
             msg = "Choose Rclone remote:" + (
-                "\nTransfer Type: Download"
+                "\nTransfer Type: Download"
                 if self.list_status == "rcd"
-                else "\nTransfer Type: Upload"
+                else "\nTransfer Type: Upload"
             )
             msg += f"\nConfig Path: {self.config_path}"
-            msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}"
+            msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
            buttons = ButtonMaker()
-            for remote in self.__sections:
+            for remote in self._sections:
                 buttons.callback(remote, f"rcq re {remote}:")
-            if self.__rc_user and self.__rc_owner:
+            if self._rc_user and self._rc_owner:
                 buttons.callback("Back", "rcq back re", position="footer")
             buttons.callback("Cancel", "rcq cancel", position="footer")
-            button = buttons.column(2)
-            await self.__send_list_message(msg, button)
+            button = buttons.menu(2)
+            await self._send_list_message(msg, button)
 
     async def list_config(self):
-        if self.__rc_user and self.__rc_owner:
+        if self._rc_user and self._rc_owner:
             msg = "Choose Rclone config:" + (
                 "\nTransfer Type: Download"
                 if self.list_status == "rcd"
                 else "\nTransfer Type: Upload"
             )
-            msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}"
+            msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
             buttons = ButtonMaker()
             buttons.callback("Owner Config", "rcq owner")
             buttons.callback("My Config", "rcq user")
             buttons.callback("Cancel", "rcq cancel")
-            button = buttons.column(2)
-            await self.__send_list_message(msg, button)
+            button = buttons.menu(2)
+            await self._send_list_message(msg, button)
         else:
-            self.config_path = "rcl.conf" if self.__rc_owner else self.user_rcc_path
+            self.config_path = (
+                "rclone.conf" if self._rc_owner else self.user_rcc_path
+            )
             await self.list_remotes()
 
     async def back_from_path(self):
@@ -313,18 +319,19 @@ async def back_from_path(self):
             path = self.path.rsplit("/", 1)
             self.path = path[0] if len(path) > 1 else ""
             await self.get_path()
-        elif len(self.__sections) > 1:
+        elif len(self._sections) > 1:
             await self.list_remotes()
         else:
             await self.list_config()
 
     async def get_rclone_path(self, status, config_path=None):
         self.list_status = status
-        future = self.__event_handler()
+        future = self._event_handler()
         if config_path is None:
-            self.__rc_user = await aiopath.exists(self.user_rcc_path)
-            self.__rc_owner = await aiopath.exists("rcl.conf")
-            if not self.__rc_owner and not self.__rc_user:
+            self._rc_user, self._rc_owner = await gather(
+                aiopath.exists(self.user_rcc_path), aiopath.exists("rclone.conf")
+            )
+            if not self._rc_owner and not self._rc_user:
                 self.event.set()
                 return "Rclone Config not Exists!"
await self.list_config() @@ -332,7 +339,7 @@ async def get_rclone_path(self, status, config_path=None): self.config_path = config_path await self.list_remotes() await wrap_future(future) - await self.__reply_to.delete() - if self.config_path != "rcl.conf" and not self.is_cancelled: + await delete_message(self._reply_to) + if self.config_path != "rclone.conf" and not self.listener.isCancelled: return f"mrcc:{self.remote}{self.path}" return f"{self.remote}{self.path}" diff --git a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py index 7b3091234..22e124c48 100644 --- a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py +++ b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py @@ -1,64 +1,67 @@ -import contextlib from re import findall as re_findall from json import loads from random import randrange from asyncio import gather, create_subprocess_exec from logging import getLogger +from contextlib import suppress from configparser import ConfigParser from asyncio.subprocess import PIPE from aiofiles import open as aiopen from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, listdir +from aiofiles.os import listdir, makedirs -from bot import GLOBAL_EXTENSION_FILTER, config_dict +from bot import config_dict from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async -from bot.helper.ext_utils.files_utils import get_mime_type, count_files_and_folders +from bot.helper.ext_utils.files_utils import ( + get_mime_type, + clean_unwanted, + count_files_and_folders, +) LOGGER = getLogger(__name__) class RcloneTransferHelper: - def __init__(self, listener=None, name=""): - self.__listener = listener - self.__proc = None - self.__transferred_size = "0 B" - self.__eta = "-" - self.__percentage = "0%" - self.__speed = "0 B/s" - self.__size = "0 B" - self.__is_cancelled = False - self.__is_download = False - self.__is_upload = False - self.__sa_count = 1 - self.__sa_index = 0 - self.__sa_number = 0 - self.name = name + def __init__(self, listener): + self._listener = listener + self._proc = None + self._transferred_size = "0 B" + self._eta = "-" + self._percentage = "0%" + self._speed = "0 B/s" + self._size = "0 B" + self._is_download = False + self._is_upload = False + self._sa_count = 1 + self._sa_index = 0 + self._sa_number = 0 + self._use_service_accounts = config_dict["USE_SA"] @property def transferred_size(self): - return self.__transferred_size + return self._transferred_size @property def percentage(self): - return self.__percentage + return self._percentage @property def speed(self): - return self.__speed + return self._speed @property def eta(self): - return self.__eta + return self._eta @property def size(self): - return self.__size + return self._size - async def __progress(self): - while not (self.__proc is None or self.__is_cancelled): + async def _progress(self): + while not (self._proc is None or self._listener.isCancelled): try: - data = (await self.__proc.stdout.readline()).decode() + data = (await self._proc.stdout.readline()).decode() except Exception: continue if not data: @@ -68,37 +71,37 @@ async def __progress(self): data, ): ( - self.__transferred_size, - self.__size, - self.__percentage, - self.__speed, - self.__eta, + self._transferred_size, + self._size, + self._percentage, + self._speed, + self._eta, ) = data[0] - def __switchServiceAccount(self): - if self.__sa_index == self.__sa_number - 1: - self.__sa_index = 0 + def _switchServiceAccount(self): + if self._sa_index == self._sa_number 
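For orientation, here is a minimal, hypothetical sketch of how the listener-based RcloneList above is driven; the caller and the listener attributes (client, message, userId, userDict, isCancelled) are assumptions inferred from this diff, not code from the patch:

# Hypothetical caller of the refactored path picker.
async def pick_download_path(listener):
    # "rcd" = download listing, per the list_status checks above.
    path = await RcloneList(listener).get_rclone_path("rcd")
    if not listener.isCancelled:
        # Paths chosen from a user's own config come back prefixed with "mrcc:".
        return path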
diff --git a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
index 7b3091234..22e124c48 100644
--- a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
+++ b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
@@ -1,64 +1,67 @@
-import contextlib
 from re import findall as re_findall
 from json import loads
 from random import randrange
 from asyncio import gather, create_subprocess_exec
 from logging import getLogger
+from contextlib import suppress
 from configparser import ConfigParser
 from asyncio.subprocess import PIPE

 from aiofiles import open as aiopen
 from aiofiles.os import path as aiopath
-from aiofiles.os import mkdir, listdir
+from aiofiles.os import listdir, makedirs

-from bot import GLOBAL_EXTENSION_FILTER, config_dict
+from bot import config_dict
 from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async
-from bot.helper.ext_utils.files_utils import get_mime_type, count_files_and_folders
+from bot.helper.ext_utils.files_utils import (
+    get_mime_type,
+    clean_unwanted,
+    count_files_and_folders,
+)

 LOGGER = getLogger(__name__)


 class RcloneTransferHelper:
-    def __init__(self, listener=None, name=""):
-        self.__listener = listener
-        self.__proc = None
-        self.__transferred_size = "0 B"
-        self.__eta = "-"
-        self.__percentage = "0%"
-        self.__speed = "0 B/s"
-        self.__size = "0 B"
-        self.__is_cancelled = False
-        self.__is_download = False
-        self.__is_upload = False
-        self.__sa_count = 1
-        self.__sa_index = 0
-        self.__sa_number = 0
-        self.name = name
+    def __init__(self, listener):
+        self._listener = listener
+        self._proc = None
+        self._transferred_size = "0 B"
+        self._eta = "-"
+        self._percentage = "0%"
+        self._speed = "0 B/s"
+        self._size = "0 B"
+        self._is_download = False
+        self._is_upload = False
+        self._sa_count = 1
+        self._sa_index = 0
+        self._sa_number = 0
+        self._use_service_accounts = config_dict["USE_SA"]

     @property
     def transferred_size(self):
-        return self.__transferred_size
+        return self._transferred_size

     @property
     def percentage(self):
-        return self.__percentage
+        return self._percentage

     @property
     def speed(self):
-        return self.__speed
+        return self._speed

     @property
     def eta(self):
-        return self.__eta
+        return self._eta

     @property
     def size(self):
-        return self.__size
+        return self._size

-    async def __progress(self):
-        while not (self.__proc is None or self.__is_cancelled):
+    async def _progress(self):
+        while not (self._proc is None or self._listener.isCancelled):
             try:
-                data = (await self.__proc.stdout.readline()).decode()
+                data = (await self._proc.stdout.readline()).decode()
             except Exception:
                 continue
             if not data:
@@ -68,37 +71,37 @@ async def __progress(self):
                 data,
             ):
                 (
-                    self.__transferred_size,
-                    self.__size,
-                    self.__percentage,
-                    self.__speed,
-                    self.__eta,
+                    self._transferred_size,
+                    self._size,
+                    self._percentage,
+                    self._speed,
+                    self._eta,
                 ) = data[0]

-    def __switchServiceAccount(self):
-        if self.__sa_index == self.__sa_number - 1:
-            self.__sa_index = 0
+    def _switchServiceAccount(self):
+        if self._sa_index == self._sa_number - 1:
+            self._sa_index = 0
         else:
-            self.__sa_index += 1
-        self.__sa_count += 1
-        remote = f"sa{self.__sa_index:03}"
+            self._sa_index += 1
+        self._sa_count += 1
+        remote = f"sa{self._sa_index:03}"
         LOGGER.info(f"Switching to {remote} remote")
         return remote

-    async def __create_rc_sa(self, remote, remote_opts):
+    async def _create_rc_sa(self, remote, remote_opts):
         sa_conf_dir = "rclone_sa"
         sa_conf_file = f"{sa_conf_dir}/{remote}.conf"
-        if not await aiopath.isdir(sa_conf_dir):
-            await mkdir(sa_conf_dir)
-        elif await aiopath.isfile(sa_conf_file):
+        if await aiopath.isfile(sa_conf_file):
             return sa_conf_file
+        await makedirs(sa_conf_dir, exist_ok=True)

         if gd_id := remote_opts.get("team_drive"):
             option = "team_drive"
         elif gd_id := remote_opts.get("root_folder_id"):
             option = "root_folder_id"
         else:
-            return "rcl.conf"
+            self._use_service_accounts = False
+            return "rclone.conf"

         files = await listdir("accounts")
         text = "".join(
@@ -110,98 +113,97 @@ async def __create_rc_sa(self, remote, remote_opts):
             await f.write(text)
         return sa_conf_file

-    async def __start_download(self, cmd, remote_type):
-        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-        _, return_code = await gather(self.__progress(), self.__proc.wait())
+    async def _start_download(self, cmd, remote_type):
+        self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
+        _, return_code = await gather(self._progress(), self._proc.wait())

-        if self.__is_cancelled:
+        if self._listener.isCancelled:
             return None

         if return_code == 0:
-            await self.__listener.on_download_complete()
+            await self._listener.on_download_complete()
             return None
         if return_code != -9:
-            error = (await self.__proc.stderr.read()).decode().strip()
-            if (
-                not error
-                and remote_type == "drive"
-                and config_dict["USE_SERVICE_ACCOUNTS"]
-            ):
+            error = (await self._proc.stderr.read()).decode().strip()
+            if not error and remote_type == "drive" and self._use_service_accounts:
                 error = (
                     "Mostly your service accounts don't have access to this drive!"
                 )
+            elif not error:
+                error = (
+                    "Use /shell cat rlog.txt to see more information"
+                )
             LOGGER.error(error)
             if (
-                self.__sa_number != 0
+                self._sa_number != 0
                 and remote_type == "drive"
                 and "RATE_LIMIT_EXCEEDED" in error
-                and config_dict["USE_SERVICE_ACCOUNTS"]
+                and self._use_service_accounts
             ):
-                if self.__sa_count < self.__sa_number:
-                    remote = self.__switchServiceAccount()
+                if self._sa_count < self._sa_number:
+                    remote = self._switchServiceAccount()
                     cmd[6] = f"{remote}:{cmd[6].split(':', 1)[1]}"
-                    if self.__is_cancelled:
+                    if self._listener.isCancelled:
                         return None
-                    return await self.__start_download(cmd, remote_type)
+                    return await self._start_download(cmd, remote_type)
                 LOGGER.info(
-                    f"Reached maximum number of service accounts switching, which is {self.__sa_count}"
+                    f"Reached maximum number of service accounts switching, which is {self._sa_count}"
                 )
-                await self.__listener.onDownloadError(error[:4000])
+                await self._listener.onDownloadError(error[:4000])
                 return None
         return None

-    async def download(self, remote, rc_path, config_path, path):
-        self.__is_download = True
+    async def download(self, remote, config_path, path):
+        self._is_download = True
         try:
-            remote_opts = await self.__get_remote_options(config_path, remote)
+            remote_opts = await self._get_remote_options(config_path, remote)
         except Exception as err:
-            await self.__listener.onDownloadError(str(err))
+            await self._listener.onDownloadError(str(err))
             return
         remote_type = remote_opts["type"]

         if (
             remote_type == "drive"
-            and config_dict["USE_SERVICE_ACCOUNTS"]
-            and config_path == "rcl.conf"
+            and self._use_service_accounts
+            and config_path == "rclone.conf"
             and await aiopath.isdir("accounts")
             and not remote_opts.get("service_account_file")
         ):
-            config_path = await self.__create_rc_sa(remote, remote_opts)
-            if config_path != "rcl.conf":
+            config_path = await self._create_rc_sa(remote, remote_opts)
+            if config_path != "rclone.conf":
                 sa_files = await listdir("accounts")
-                self.__sa_number = len(sa_files)
-                self.__sa_index = randrange(self.__sa_number)
-                remote = f"sa{self.__sa_index:03}"
+                self._sa_number = len(sa_files)
+                self._sa_index = randrange(self._sa_number)
+                remote = f"sa{self._sa_index:03}"
                 LOGGER.info(f"Download with service account {remote}")

-        rc_flags = self.__listener.rc_flags or config_dict["RCLONE_FLAGS"]
-        cmd = self.__getUpdatedCommand(
-            config_path, f"{remote}:{rc_path}", path, rc_flags, "copy"
+        cmd = self._getUpdatedCommand(
+            config_path, f"{remote}:{self._listener.link}", path, "copy"
         )

         if (
             remote_type == "drive"
             and not config_dict["RCLONE_FLAGS"]
-            and not self.__listener.rc_flags
+            and not self._listener.rcFlags
         ):
             cmd.append("--drive-acknowledge-abuse")
         elif remote_type != "drive":
             cmd.extend(("--retries-sleep", "3s"))

-        await self.__start_download(cmd, remote_type)
+        await self._start_download(cmd, remote_type)

-    async def __get_gdrive_link(self, config_path, remote, rc_path, mime_type):
+    async def _get_gdrive_link(self, config_path, remote, rc_path, mime_type):
         if mime_type == "Folder":
             epath = rc_path.strip("/").rsplit("/", 1)
             epath = f"{remote}:{epath[0]}" if len(epath) > 1 else f"{remote}:"
             destination = f"{remote}:{rc_path}"
         elif rc_path:
-            epath = f"{remote}:{rc_path}/{self.name}"
+            epath = f"{remote}:{rc_path}/{self._listener.name}"
             destination = epath
         else:
-            epath = f"{remote}:{rc_path}{self.name}"
+            epath = f"{remote}:{rc_path}{self._listener.name}"
             destination = epath

         cmd = [
@@ -218,78 +220,82 @@ async def _get_gdrive_link(self, config_path, remote, rc_path, mime_type):
         if code == 0:
             result = loads(res)
-            fid = next((r["ID"] for r in result if r["Path"] == self.name), "err")
+            fid = next(
+                (r["ID"] for r in result if r["Path"] == self._listener.name), "err"
+            )
             link = (
                 f"https://drive.google.com/drive/folders/{fid}"
                 if mime_type == "Folder"
                 else f"https://drive.google.com/uc?id={fid}&export=download"
             )
         elif code != -9:
+            if not err:
+                err = "Use /shell cat rlog.txt to see more information"
             LOGGER.error(
                 f"while getting drive link. Path: {destination}. Stderr: {err}"
             )
             link = ""
         return link, destination

-    async def __start_upload(self, cmd, remote_type):
-        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-        _, return_code = await gather(self.__progress(), self.__proc.wait())
+    async def _start_upload(self, cmd, remote_type):
+        self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
+        _, return_code = await gather(self._progress(), self._proc.wait())

-        if self.__is_cancelled:
+        if self._listener.isCancelled:
             return False
         if return_code == -9:
             return False
         if return_code != 0:
-            error = (await self.__proc.stderr.read()).decode().strip()
-            if (
-                not error
-                and remote_type == "drive"
-                and config_dict["USE_SERVICE_ACCOUNTS"]
-            ):
+            error = (await self._proc.stderr.read()).decode().strip()
+            if not error and remote_type == "drive" and self._use_service_accounts:
+                error = "Mostly your service accounts don't have access to this drive or RATE_LIMIT_EXCEEDED"
+            elif not error:
                 error = (
-                    "Mostly your service accounts don't have access to this drive!"
+                    "Use /shell cat rlog.txt to see more information"
                 )
             LOGGER.error(error)
             if (
-                self.__sa_number != 0
+                self._sa_number != 0
                 and remote_type == "drive"
                 and "RATE_LIMIT_EXCEEDED" in error
-                and config_dict["USE_SERVICE_ACCOUNTS"]
+                and self._use_service_accounts
             ):
-                if self.__sa_count < self.__sa_number:
-                    remote = self.__switchServiceAccount()
+                if self._sa_count < self._sa_number:
+                    remote = self._switchServiceAccount()
                     cmd[7] = f"{remote}:{cmd[7].split(':', 1)[1]}"
                     return (
                         False
-                        if self.__is_cancelled
-                        else await self.__start_upload(cmd, remote_type)
+                        if self._listener.isCancelled
+                        else await self._start_upload(cmd, remote_type)
                     )
                 LOGGER.info(
-                    f"Reached maximum number of service accounts switching, which is {self.__sa_count}"
+                    f"Reached maximum number of service accounts switching, which is {self._sa_count}"
                 )
-                await self.__listener.onUploadError(error[:4000])
+                await self._listener.onUploadError(error[:4000])
                 return False
         return True

-    async def upload(self, path, size):
-        self.__is_upload = True
-        rc_path = self.__listener.upPath.strip("/")
+    async def upload(self, path, unwanted_files, ft_delete):
+        self._is_upload = True
+        rc_path = self._listener.upDest.strip("/")
         if rc_path.startswith("mrcc:"):
             rc_path = rc_path.split("mrcc:", 1)[1]
-            oconfig_path = f"tanha/{self.__listener.message.from_user.id}.conf"
+            oconfig_path = f"rclone/{self._listener.userId}.conf"
         else:
-            oconfig_path = "rcl.conf"
+            oconfig_path = "rclone.conf"

         oremote, rc_path = rc_path.split(":", 1)

         if await aiopath.isdir(path):
             mime_type = "Folder"
-            folders, files = await count_files_and_folders(path)
-            rc_path += f"/{self.name}" if rc_path else self.name
+            folders, files = await count_files_and_folders(
+                path, self._listener.extensionFilter, unwanted_files
+            )
+            rc_path += f"/{self._listener.name}" if rc_path else self._listener.name
         else:
-            if path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)):
-                await self.__listener.onUploadError(
+            if path.lower().endswith(tuple(self._listener.extensionFilter)):
+                await self._listener.onUploadError(
                     "This file extension is excluded by extension filter!"
                 )
                 return
@@ -298,9 +304,9 @@ async def upload(self, path, unwanted_files, ft_delete):
             files = 1

         try:
-            remote_opts = await self.__get_remote_options(oconfig_path, oremote)
+            remote_opts = await self._get_remote_options(oconfig_path, oremote)
         except Exception as err:
-            await self.__listener.onUploadError(str(err))
+            await self._listener.onUploadError(str(err))
             return
         remote_type = remote_opts["type"]

@@ -308,50 +314,49 @@ async def upload(self, path, unwanted_files, ft_delete):
         fconfig_path = oconfig_path
         if (
             remote_type == "drive"
-            and config_dict["USE_SERVICE_ACCOUNTS"]
-            and fconfig_path == "rcl.conf"
+            and self._use_service_accounts
+            and fconfig_path == "rclone.conf"
             and await aiopath.isdir("accounts")
             and not remote_opts.get("service_account_file")
         ):
-            fconfig_path = await self.__create_rc_sa(oremote, remote_opts)
-            if fconfig_path != "rcl.conf":
+            fconfig_path = await self._create_rc_sa(oremote, remote_opts)
+            if fconfig_path != "rclone.conf":
                 sa_files = await listdir("accounts")
-                self.__sa_number = len(sa_files)
-                self.__sa_index = randrange(self.__sa_number)
-                fremote = f"sa{self.__sa_index:03}"
+                self._sa_number = len(sa_files)
+                self._sa_index = randrange(self._sa_number)
+                fremote = f"sa{self._sa_index:03}"
                 LOGGER.info(f"Upload with service account {fremote}")

-        rc_flags = self.__listener.rc_flags or config_dict["RCLONE_FLAGS"]
         method = (
-            "move" if not self.__listener.seed or self.__listener.newDir else "copy"
+            "move" if not self._listener.seed or self._listener.newDir else "copy"
         )
-        cmd = self.__getUpdatedCommand(
-            fconfig_path, path, f"{fremote}:{rc_path}", rc_flags, method
+        cmd = self._getUpdatedCommand(
+            fconfig_path, path, f"{fremote}:{rc_path}", method, unwanted_files
         )
         if (
             remote_type == "drive"
             and not config_dict["RCLONE_FLAGS"]
-            and not self.__listener.rc_flags
+            and not self._listener.rcFlags
         ):
-            cmd.extend(("--drive-chunk-size", "64M", "--drive-upload-cutoff", "32M"))
-        elif remote_type != "drive":
-            cmd.extend(("--retries-sleep", "3s"))
+            cmd.extend(
+                ("--drive-chunk-size", "128M", "--drive-upload-cutoff", "128M")
+            )

-        result = await self.__start_upload(cmd, remote_type)
+        result = await self._start_upload(cmd, remote_type)
         if not result:
             return

         if remote_type == "drive":
-            link, destination = await self.__get_gdrive_link(
+            link, destination = await self._get_gdrive_link(
                 oconfig_path, oremote, rc_path, mime_type
             )
         else:
             if mime_type == "Folder":
                 destination = f"{oremote}:{rc_path}"
             elif rc_path:
-                destination = f"{oremote}:{rc_path}/{self.name}"
+                destination = f"{oremote}:{rc_path}/{self._listener.name}"
             else:
-                destination = f"{oremote}:{self.name}"
+                destination = f"{oremote}:{self._listener.name}"

             cmd = ["xone", "link", "--config", oconfig_path, destination]
             res, err, code = await cmd_exec(cmd)
@@ -359,29 +364,32 @@ async def upload(self, path, unwanted_files, ft_delete):
             if code == 0:
                 link = res
             elif code != -9:
+                if not err:
+                    err = "Use /shell cat rlog.txt to see more information"
                 LOGGER.error(
                     f"while getting link. Path: {destination} | Stderr: {err}"
                 )
                 link = ""
-        if self.__is_cancelled:
+        if self._listener.isCancelled:
             return
         LOGGER.info(f"Upload Done. Path: {destination}")
-        await self.__listener.onUploadComplete(
-            link, size, files, folders, mime_type, self.name, destination
+        if self._listener.seed and not self._listener.newDir:
+            await clean_unwanted(path, ft_delete)
+        await self._listener.onUploadComplete(
+            link, files, folders, mime_type, destination
         )

-    async def clone(
-        self, config_path, src_remote, src_path, destination, rc_flags, mime_type
-    ):
+    async def clone(self, config_path, src_remote, src_path, mime_type, method):
+        destination = self._listener.upDest
         dst_remote, dst_path = destination.split(":", 1)

         try:
             src_remote_opts, dst_remote_opt = await gather(
-                self.__get_remote_options(config_path, src_remote),
-                self.__get_remote_options(config_path, dst_remote),
+                self._get_remote_options(config_path, src_remote),
+                self._get_remote_options(config_path, dst_remote),
             )
         except Exception as err:
-            await self.__listener.onUploadError(str(err))
+            await self._listener.onUploadError(str(err))
             return None, None

         src_remote_type, dst_remote_type = (
@@ -389,57 +397,64 @@ async def clone(self, config_path, src_remote, src_path, mime_type, method):
             dst_remote_opt["type"],
         )

-        cmd = self.__getUpdatedCommand(
-            config_path, f"{src_remote}:{src_path}", destination, rc_flags, "copy"
+        cmd = self._getUpdatedCommand(
+            config_path, f"{src_remote}:{src_path}", destination, method
         )
-        if not rc_flags:
+        if not self._listener.rcFlags and not config_dict["RCLONE_FLAGS"]:
             if src_remote_type == "drive" and dst_remote_type != "drive":
                 cmd.append("--drive-acknowledge-abuse")
-            elif dst_remote_type == "drive" and src_remote_type != "drive":
-                cmd.extend(
-                    ("--drive-chunk-size", "64M", "--drive-upload-cutoff", "32M")
-                )
             elif src_remote_type == "drive":
                 cmd.extend(("--tpslimit", "3", "--transfers", "3"))

-        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-        _, return_code = await gather(self.__progress(), self.__proc.wait())
+        self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
+        _, return_code = await gather(self._progress(), self._proc.wait())

-        if self.__is_cancelled:
+        if self._listener.isCancelled:
             return None, None

         if return_code == -9:
             return None, None
         if return_code != 0:
-            error = (await self.__proc.stderr.read()).decode().strip()
+            error = (
+                (await self._proc.stderr.read()).decode().strip()
+                or "Use /shell cat rlog.txt to see more information"
+            )
             LOGGER.error(error)
-            await self.__listener.onUploadError(error[:4000])
+            await self._listener.onUploadError(error[:4000])
             return None, None
         if dst_remote_type == "drive":
-            link, destination = await self.__get_gdrive_link(
+            link, destination = await self._get_gdrive_link(
                 config_path, dst_remote, dst_path, mime_type
             )
-            return (None, None) if self.__is_cancelled else (link, destination)
+            return (
+                (None, None) if self._listener.isCancelled else (link, destination)
+            )
         if mime_type != "Folder":
-            destination += f"/{self.name}" if dst_path else self.name
+            destination += (
+                f"/{self._listener.name}" if dst_path else self._listener.name
+            )

         cmd = ["xone", "link", "--config", config_path, destination]
         res, err, code = await cmd_exec(cmd)
-        if self.__is_cancelled:
+        if self._listener.isCancelled:
             return None, None
         if code == 0:
             return res, destination
         if code != -9:
+            if not err:
+                err = "Use /shell cat rlog.txt to see more information"
             LOGGER.error(f"while getting link. Path: {destination} | Stderr: {err}")
-            await self.__listener.onUploadError(err[:4000])
-            return None, None
+            return None, destination
         return None

-    @staticmethod
-    def __getUpdatedCommand(config_path, source, destination, rc_flags, method):
-        ext = "*.{" + ",".join(GLOBAL_EXTENSION_FILTER) + "}"
+    def _getUpdatedCommand(
+        self, config_path, source, destination, method, unwanted_files=None
+    ):
+        if unwanted_files is None:
+            unwanted_files = []
+        ext = "*.{" + ",".join(self._listener.extensionFilter) + "}"
         cmd = [
             "xone",
             method,
@@ -451,6 +466,8 @@ def __getUpdatedCommand(config_path, source, destination, rc_flags, method):
             destination,
             "--exclude",
             ext,
+            "--retries-sleep",
+            "3s",
             "--ignore-case",
             "--low-level-retries",
             "1",
@@ -460,18 +477,21 @@ def __getUpdatedCommand(config_path, source, destination, rc_flags, method):
             "--log-level",
             "DEBUG",
         ]
-        if rc_flags:
-            rc_flags = rc_flags.split("|")
-            for flag in rc_flags:
+        if rcflags := self._listener.rcFlags or config_dict["RCLONE_FLAGS"]:
+            rcflags = rcflags.split("|")
+            for flag in rcflags:
                 if ":" in flag:
                     key, value = map(str.strip, flag.split(":", 1))
                     cmd.extend((key, value))
                 elif len(flag) > 0:
                     cmd.append(flag.strip())
+        if unwanted_files:
+            for f in unwanted_files:
+                cmd.extend(("--exclude", f.rsplit("/", 1)[1]))
         return cmd

     @staticmethod
-    async def __get_remote_options(config_path, remote):
+    async def _get_remote_options(config_path, remote):
         config = ConfigParser()
         async with aiopen(config_path) as f:
             contents = await f.read()
@@ -479,17 +499,17 @@ async def _get_remote_options(config_path, remote):
         options = config.options(remote)
         return {opt: config.get(remote, opt) for opt in options}

-    async def cancel_download(self):
-        self.__is_cancelled = True
-        if self.__proc is not None:
-            with contextlib.suppress(Exception):
-                self.__proc.kill()
-        if self.__is_download:
-            LOGGER.info(f"Cancelling Download: {self.name}")
-            await self.__listener.onDownloadError("Stopped by user!")
-        elif self.__is_upload:
-            LOGGER.info(f"Cancelling Upload: {self.name}")
-            await self.__listener.onUploadError("Cancelled by user!")
+    async def cancel_task(self):
+        self._listener.isCancelled = True
+        if self._proc is not None:
+            with suppress(Exception):
+                self._proc.kill()
+        if self._is_download:
+            LOGGER.info(f"Cancelling Download: {self._listener.name}")
+            await self._listener.onDownloadError("Download stopped by user!")
+        elif self._is_upload:
+            LOGGER.info(f"Cancelling Upload: {self._listener.name}")
+            await self._listener.onUploadError("your upload has been stopped!")
         else:
-            LOGGER.info(f"Cancelling Clone: {self.name}")
-            await self.__listener.onUploadError("Your clone has been stopped!")
+            LOGGER.info(f"Cancelling Clone: {self._listener.name}")
+            await self._listener.onUploadError("your clone has been stopped!")
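To make the assembled rclone invocation easier to picture, this is roughly the command _getUpdatedCommand now produces. The values are illustrative only, flags that sit outside the visible hunks are omitted, and "xone" is the rclone binary name used throughout this patch:

# Illustrative shape of the generated copy/move command (not verbatim from the code).
cmd = [
    "xone", "copy",                  # method: "copy", or "move" when not seeding
    "--config", "rclone.conf",       # or rclone/<userId>.conf for "mrcc:" paths
    "/usr/src/app/downloads/task", "remote:folder",
    "--exclude", "*.{aria2,!qB}",    # built from the listener's extension filter
    "--retries-sleep", "3s",         # newly added for every transfer by this patch
    "--ignore-case",
    "--low-level-retries", "1",
    "--log-level", "DEBUG",
]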
diff --git a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
index 36f37f918..b7d7abd73 100644
--- a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
@@ -1,114 +1,107 @@
 from time import time

 from bot import LOGGER, aria2
-from bot.helper.ext_utils.bot_utils import (
-    MirrorStatus,
-    sync_to_async,
-    get_readable_time,
-)
+from bot.helper.ext_utils.bot_utils import sync_to_async
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_time


-def get_download(gid):
+def get_download(gid, old_info=None):
     try:
-        return aria2.get_download(gid)
+        res = aria2.get_download(gid)
+        return res or old_info
     except Exception as e:
         LOGGER.error(f"{e}: Aria2c, Error while getting torrent info")
-        return None
+        return old_info


 class Aria2Status:
-    def __init__(self, gid, listener, seeding=False, queued=False):
-        self.__gid = gid
-        self.__download = get_download(gid)
-        self.__listener = listener
+    def __init__(self, listener, gid, seeding=False, queued=False):
+        self._gid = gid
+        self._download = None
+        self.listener = listener
         self.queued = queued
         self.start_time = 0
         self.seeding = seeding
-        self.message = self.__listener.message
+        self.message = listener.message

-    def __update(self):
-        if self.__download is None:
-            self.__download = get_download(self.__gid)
+    def update(self):
+        if self._download is None:
+            self._download = get_download(self._gid, self._download)
         else:
-            self.__download = self.__download.live
-        if self.__download.followed_by_ids:
-            self.__gid = self.__download.followed_by_ids[0]
-            self.__download = get_download(self.__gid)
+            self._download = self._download.live
+        if self._download.followed_by_ids:
+            self._gid = self._download.followed_by_ids[0]
+            self._download = get_download(self._gid)

     def progress(self):
-        return self.__download.progress_string()
+        return self._download.progress_string()

     def processed_bytes(self):
-        return self.__download.completed_length_string()
+        return self._download.completed_length_string()

     def speed(self):
-        return self.__download.download_speed_string()
+        return self._download.download_speed_string()

     def name(self):
-        return self.__download.name
+        return self._download.name

     def size(self):
-        return self.__download.total_length_string()
+        return self._download.total_length_string()

     def eta(self):
-        return get_readable_time(int(self.__download.eta.total_seconds()))
+        return get_readable_time(int(self._download.eta.total_seconds()))

     def status(self):
-        self.__update()
-        if self.__download.is_waiting or self.queued:
+        self.update()
+        if self._download.is_waiting or self.queued:
             if self.seeding:
                 return MirrorStatus.STATUS_QUEUEUP
             return MirrorStatus.STATUS_QUEUEDL
-        if self.__download.is_paused:
+        if self._download.is_paused:
             return MirrorStatus.STATUS_PAUSED
-        if self.__download.seeder and self.seeding:
+        if self._download.seeder and self.seeding:
             return MirrorStatus.STATUS_SEEDING
-        return MirrorStatus.STATUS_DOWNLOADING
+        return MirrorStatus.STATUS_DOWNLOADING_A

     def seeders_num(self):
-        return self.__download.num_seeders
+        return self._download.num_seeders

     def leechers_num(self):
-        return self.__download.connections
+        return self._download.connections

     def uploaded_bytes(self):
-        return self.__download.upload_length_string()
+        return self._download.upload_length_string()

-    def upload_speed(self):
-        self.__update()
-        return self.__download.upload_speed_string()
+    def seed_speed(self):
+        return self._download.upload_speed_string()

     def ratio(self):
-        return f"{round(self.__download.upload_length / self.__download.completed_length, 3)}"
+        return f"{round(self._download.upload_length / self._download.completed_length, 3)}"

     def seeding_time(self):
-        return get_readable_time(time() - self.start_time, True)
+        return get_readable_time(time() - self.start_time)

-    def download(self):
+    def task(self):
         return self

-    def listener(self):
-        return self.__listener
-
     def gid(self):
-        self.__update()
-        return self.__gid
+        return self._gid

-    async def cancel_download(self):
-        self.__update()
+    async def cancel_task(self):
+        self.listener.isCancelled = True
         await sync_to_async(self.update)
-        if self.__download.seeder and self.seeding:
+        if self._download.seeder and self.seeding:
             LOGGER.info(f"Cancelling Seed: {self.name()}")
-            await self.__listener.onUploadError(
+            await self.listener.onUploadError(
                 f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}"
             )
             await sync_to_async(
-                aria2.remove, [self.__download], force=True, files=True
+                aria2.remove, [self._download], force=True, files=True
             )
-        elif downloads := self.__download.followed_by:
+        elif downloads := self._download.followed_by:
             LOGGER.info(f"Cancelling Download: {self.name()}")
-            await self.__listener.onDownloadError("Download cancelled by user!")
-            downloads.append(self.__download)
+            await self.listener.onDownloadError("Download cancelled by user!")
+            downloads.append(self._download)
             await sync_to_async(aria2.remove, downloads, force=True, files=True)
         else:
             if self.queued:
@@ -117,7 +110,7 @@ async def cancel_task(self):
             else:
                 LOGGER.info(f"Cancelling Download: {self.name()}")
                 msg = "Download stopped by user!"
-            await self.__listener.onDownloadError(msg)
+            await self.listener.onDownloadError(msg)
             await sync_to_async(
-                aria2.remove, [self.__download], force=True, files=True
+                aria2.remove, [self._download], force=True, files=True
             )
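A small, hypothetical illustration of how the reworked aria2 status object is read; the surrounding code is not part of this patch:

# status() calls update() itself; update() re-resolves the gid when a metadata
# torrent is followed by the real download (followed_by_ids) and, via the
# old_info fallback, keeps the last known handle if aria2 can't be queried.
status = Aria2Status(listener, gid)
if status.status() == MirrorStatus.STATUS_DOWNLOADING_A:
    LOGGER.info(f"{status.name()}: {status.progress()} @ {status.speed()}, ETA {status.eta()}")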
diff --git a/bot/helper/mirror_leech_utils/status_utils/direct_status.py b/bot/helper/mirror_leech_utils/status_utils/direct_status.py
index 0740a80dd..af3ee4cdf 100644
--- a/bot/helper/mirror_leech_utils/status_utils/direct_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/direct_status.py
@@ -1,4 +1,4 @@
-from bot.helper.ext_utils.bot_utils import (
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
     get_readable_time,
     get_readable_file_size,
@@ -6,18 +6,18 @@


 class DirectStatus:
-    def __init__(self, obj, gid, listener):
-        self.__gid = gid
-        self.__listener = listener
-        self.__obj = obj
-        self.message = self.__listener.message
+    def __init__(self, listener, obj, gid):
+        self._gid = gid
+        self._obj = obj
+        self.listener = listener
+        self.message = listener.message

     def gid(self):
-        return self.__gid
+        return self._gid

     def progress_raw(self):
         try:
-            return self.__obj.processed_bytes / self.__obj.total_size * 100
+            return self._obj.processed_bytes / self.listener.size * 100
         except Exception:
             return 0

@@ -25,30 +25,30 @@ def progress(self):
         return f"{round(self.progress_raw(), 2)}%"

     def speed(self):
-        return f"{get_readable_file_size(self.__obj.speed)}/s"
+        return f"{get_readable_file_size(self._obj.speed)}/s"

     def name(self):
-        return self.__obj.name
+        return self.listener.name

     def size(self):
-        return get_readable_file_size(self.__obj.total_size)
+        return get_readable_file_size(self.listener.size)

     def eta(self):
         try:
             seconds = (
-                self.__obj.total_size - self.__obj.processed_bytes
-            ) / self.__obj.speed
+                self.listener.size - self._obj.processed_bytes
+            ) / self._obj.speed
             return get_readable_time(seconds)
         except Exception:
             return "-"

     def status(self):
-        if self.__obj.task and self.__obj.task.is_waiting:
+        if self._obj.download_task and self._obj.download_task.is_waiting:
             return MirrorStatus.STATUS_QUEUEDL
-        return MirrorStatus.STATUS_DOWNLOADING
+        return MirrorStatus.STATUS_DOWNLOADING_A

     def processed_bytes(self):
-        return get_readable_file_size(self.__obj.processed_bytes)
+        return get_readable_file_size(self._obj.processed_bytes)

-    def download(self):
-        return self.__obj
+    def task(self):
+        return self._obj
diff --git a/bot/helper/mirror_leech_utils/status_utils/extract_status.py b/bot/helper/mirror_leech_utils/status_utils/extract_status.py
index 79da14460..f18f6881f 100644
--- a/bot/helper/mirror_leech_utils/status_utils/extract_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/extract_status.py
@@ -1,52 +1,51 @@
 from time import time

-from bot import LOGGER
-from bot.helper.ext_utils.bot_utils import (
+from bot import LOGGER, subprocess_lock
+from bot.helper.ext_utils.files_utils import get_path_size
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
-    async_to_sync,
     get_readable_time,
     get_readable_file_size,
 )
-from bot.helper.ext_utils.files_utils import get_path_size


 class ExtractStatus:
-    def __init__(self, name, size, gid, listener):
-        self.__name = name
-        self.__size = size
-        self.__gid = gid
-        self.__listener = listener
-        self.__uid = listener.uid
-        self.__start_time = time()
+    def __init__(self, listener, gid):
+        self.listener = listener
+        self._size = self.listener.size
+        self._gid = gid
+        self._start_time = time()
+        self._proccessed_bytes = 0
         self.message = listener.message

     def gid(self):
-        return self.__gid
+        return self._gid

     def speed_raw(self):
-        return self.processed_raw() / (time() - self.__start_time)
+        return self._proccessed_bytes / (time() - self._start_time)

-    def progress_raw(self):
+    async def progress_raw(self):
+        await self.processed_raw()
         try:
-            return self.processed_raw() / self.__size * 100
+            return self._proccessed_bytes / self._size * 100
         except Exception:
             return 0

-    def progress(self):
-        return f"{round(self.progress_raw(), 2)}%"
+    async def progress(self):
+        return f"{round(await self.progress_raw(), 2)}%"

     def speed(self):
         return f"{get_readable_file_size(self.speed_raw())}/s"

     def name(self):
-        return self.__name
+        return self.listener.name

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def eta(self):
         try:
-            seconds = (self.__size - self.processed_raw()) / self.speed_raw()
+            seconds = (self._size - self._proccessed_bytes) / self.speed_raw()
             return get_readable_time(seconds)
         except Exception:
             return "-"
@@ -55,20 +54,26 @@ def status(self):
         return MirrorStatus.STATUS_EXTRACTING

     def processed_bytes(self):
-        return get_readable_file_size(self.processed_raw())
+        return get_readable_file_size(self._proccessed_bytes)

-    def processed_raw(self):
-        if self.__listener.newDir:
-            return async_to_sync(get_path_size, self.__listener.newDir)
-        return async_to_sync(get_path_size, self.__listener.dir) - self.__size
+    async def processed_raw(self):
+        if self.listener.newDir:
+            self._proccessed_bytes = await get_path_size(self.listener.newDir)
+        else:
+            self._proccessed_bytes = (
+                await get_path_size(self.listener.dir) - self._size
+            )

-    def download(self):
+    def task(self):
         return self

-    async def cancel_download(self):
-        LOGGER.info(f"Cancelling Extract: {self.__name}")
-        if self.__listener.suproc is not None:
-            self.__listener.suproc.kill()
-        else:
-            self.__listener.suproc = "cancelled"
-        await self.__listener.onUploadError("extracting stopped by user!")
+    async def cancel_task(self):
+        LOGGER.info(f"Cancelling Extract: {self.listener.name}")
+        self.listener.isCancelled = True
+        async with subprocess_lock:
+            if (
+                self.listener.suproc is not None
+                and self.listener.suproc.returncode is None
+            ):
+                self.listener.suproc.kill()
+        await self.listener.onUploadError("extracting stopped by user!")
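Note the interface change just above: progress() and progress_raw() on ExtractStatus are now coroutines, because they stat the working directory through get_path_size instead of blocking with async_to_sync. A sketch of what a caller must now do (hypothetical caller, not from this patch):

status = ExtractStatus(listener, gid)
percent = await status.progress()   # now awaitable; refreshes _proccessed_bytes first
speed = status.speed()              # still a plain call, reads the cached byte count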
diff --git a/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py b/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py
index d348ddb04..ad728e2ad 100644
--- a/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py
@@ -1,4 +1,4 @@
-from bot.helper.ext_utils.bot_utils import (
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
     get_readable_time,
     get_readable_file_size,
@@ -6,37 +6,36 @@


 class GdriveStatus:
-    def __init__(self, obj, size, message, gid, status):
-        self.__obj = obj
-        self.__size = size
-        self.__gid = gid
-        self.__status = status
-        self.message = message
+    def __init__(self, listener, obj, gid, status):
+        self.listener = listener
+        self._obj = obj
+        self._size = self.listener.size
+        self._gid = gid
+        self._status = status
+        self.message = listener.message

     def processed_bytes(self):
-        return get_readable_file_size(self.__obj.processed_bytes)
+        return get_readable_file_size(self._obj.processed_bytes)

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def status(self):
-        if self.__status == "up":
-            if self.__obj.processed_bytes == 0:
-                return MirrorStatus.STATUS_PROCESSING
-            return MirrorStatus.STATUS_UPLOADING
-        if self.__status == "dl":
-            return MirrorStatus.STATUS_DOWNLOADING
-        return MirrorStatus.STATUS_CLONING
+        if self._status == "up":
+            return MirrorStatus.STATUS_UPLOADING_GD
+        if self._status == "dl":
+            return MirrorStatus.STATUS_DOWNLOADING_GD
+        return MirrorStatus.STATUS_CLONING_GD

     def name(self):
-        return self.__obj.name
+        return self.listener.name

     def gid(self) -> str:
-        return self.__gid
+        return self._gid

     def progress_raw(self):
         try:
-            return self.__obj.processed_bytes / self.__size * 100
+            return self._obj.processed_bytes / self._size * 100
         except Exception:
             return 0

@@ -44,14 +43,14 @@ def progress(self):
         return f"{round(self.progress_raw(), 2)}%"

     def speed(self):
-        return f"{get_readable_file_size(self.__obj.speed)}/s"
+        return f"{get_readable_file_size(self._obj.speed)}/s"

     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed
+            seconds = (self._size - self._obj.processed_bytes) / self._obj.speed
             return get_readable_time(seconds)
         except Exception:
             return "-"

-    def download(self):
-        return self.__obj
+    def task(self):
+        return self._obj
diff --git a/bot/helper/mirror_leech_utils/status_utils/media_convert_status.py b/bot/helper/mirror_leech_utils/status_utils/media_convert_status.py
new file mode 100644
index 000000000..2dcbb2ed4
--- /dev/null
+++ b/bot/helper/mirror_leech_utils/status_utils/media_convert_status.py
@@ -0,0 +1,35 @@
+from bot import LOGGER
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size
+
+
+class MediaConvertStatus:
+    def __init__(self, listener, gid):
+        self.listener = listener
+        self._gid = gid
+        self._size = self.listener.size
+        self.message = listener.message
+
+    def gid(self):
+        return self._gid
+
+    def name(self):
+        return self.listener.name
+
+    def size(self):
+        return get_readable_file_size(self._size)
+
+    def status(self):
+        return MirrorStatus.STATUS_CONVERTING
+
+    def task(self):
+        return self
+
+    async def cancel_task(self):
+        LOGGER.info(f"Cancelling Converting: {self.listener.name}")
+        self.listener.isCancelled = True
+        if (
+            self.listener.suproc is not None
+            and self.listener.suproc.returncode is None
+        ):
+            self.listener.suproc.kill()
+        await self.listener.onUploadError("Converting stopped by user!")
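MediaConvertStatus above, and MetadataStatus and SampleVideoStatus below, all follow the same cancellation shape; a condensed sketch for reference (naming per this patch; the exact error text varies per class, and MetadataStatus additionally holds subprocess_lock):

async def cancel_task(self):
    self.listener.isCancelled = True                      # flag the task first so pollers stop
    if self.listener.suproc is not None and self.listener.suproc.returncode is None:
        self.listener.suproc.kill()                       # only kill a still-running subprocess
    await self.listener.onUploadError("... stopped by user!")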
diff --git a/bot/helper/mirror_leech_utils/status_utils/mega_status.py b/bot/helper/mirror_leech_utils/status_utils/mega_status.py
index 25d2101b7..d9807af45 100644
--- a/bot/helper/mirror_leech_utils/status_utils/mega_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/mega_status.py
@@ -1,4 +1,4 @@
-from bot.helper.ext_utils.bot_utils import (
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
     get_readable_time,
     get_readable_file_size,
@@ -6,19 +6,23 @@


 class MegaDownloadStatus:
-    def __init__(self, name, size, gid, obj, message):
-        self.__obj = obj
-        self.__name = name
-        self.__size = size
-        self.__gid = gid
-        self.message = message
+    def __init__(self, listener, name, size, gid, obj):
+        self._obj = obj
+        self._size = size
+        self._gid = gid
+        self._name = name
+        self.listener = listener
+        self.message = listener.message

     def name(self):
-        return self.__name
+        return self._name
+
+    def task(self):
+        return self

     def progress_raw(self):
         try:
-            return round(self.__obj.downloaded_bytes / self.__size * 100, 2)
+            return round(self._obj.downloaded_bytes / self._size * 100, 2)
         except Exception:
             return 0.0

@@ -26,26 +30,30 @@ def progress(self):
         return f"{self.progress_raw()}%"

     def status(self):
-        return MirrorStatus.STATUS_DOWNLOADING
+        return MirrorStatus.STATUS_DOWNLOADING_MEGA

     def processed_bytes(self):
-        return get_readable_file_size(self.__obj.downloaded_bytes)
+        return get_readable_file_size(self._obj.downloaded_bytes)

     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.downloaded_bytes) / self.__obj.speed
+            seconds = (self._size - self._obj.downloaded_bytes) / self._obj.speed
             return get_readable_time(seconds)
         except ZeroDivisionError:
             return "-"

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def speed(self):
-        return f"{get_readable_file_size(self.__obj.speed)}/s"
+        return f"{get_readable_file_size(self._obj.speed)}/s"

     def gid(self):
-        return self.__gid
+        return self._gid

     def download(self):
-        return self.__obj
+        return self._obj
+
+    async def cancel_task(self):
+        self.listener.isCancelled = True
+        await self.listener.onDownloadError("Download Canceled by user")
diff --git a/bot/helper/mirror_leech_utils/status_utils/metadata_status.py b/bot/helper/mirror_leech_utils/status_utils/metadata_status.py
new file mode 100644
index 000000000..75e09c911
--- /dev/null
+++ b/bot/helper/mirror_leech_utils/status_utils/metadata_status.py
@@ -0,0 +1,36 @@
+from bot import LOGGER, subprocess_lock
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size
+
+
+class MetadataStatus:
+    def __init__(self, listener, gid):
+        self.listener = listener
+        self._gid = gid
+        self._size = self.listener.size
+        self.message = listener.message
+
+    def gid(self):
+        return self._gid
+
+    def name(self):
+        return self.listener.name
+
+    def size(self):
+        return get_readable_file_size(self._size)
+
+    def status(self):
+        return MirrorStatus.STATUS_METADATA
+
+    def task(self):
+        return self
+
+    async def cancel_task(self):
+        LOGGER.info(f"Cancelling Metadata: {self.listener.name}")
+        self.listener.isCancelled = True
+        async with subprocess_lock:
+            if (
+                self.listener.suproc is not None
+                and self.listener.suproc.returncode is None
+            ):
+                self.listener.suproc.kill()
+        await self.listener.onUploadError("Metadata stopped by user!")
diff --git a/bot/helper/mirror_leech_utils/status_utils/qbit_status.py b/bot/helper/mirror_leech_utils/status_utils/qbit_status.py
index 28576a952..0c7e95575 100644
--- a/bot/helper/mirror_leech_utils/status_utils/qbit_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/qbit_status.py
@@ -1,59 +1,57 @@
-from asyncio import sleep
+from asyncio import sleep, gather

 from bot import LOGGER, QbTorrents, xnox_client, qb_listener_lock
-from bot.helper.ext_utils.bot_utils import (
+from bot.helper.ext_utils.bot_utils import sync_to_async
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
-    sync_to_async,
     get_readable_time,
     get_readable_file_size,
 )


-def get_download(client, tag):
+def get_download(tag, old_info=None):
     try:
-        return client.torrents_info(tag=tag)[0]
+        res = xnox_client.torrents_info(tag=tag)[0]
+        return res or old_info
     except Exception as e:
         LOGGER.error(f"{e}: Qbittorrent, while getting torrent info. Tag: {tag}")
-        return None
+        return old_info


 class QbittorrentStatus:
     def __init__(self, listener, seeding=False, queued=False):
-        self.__client = xnox_client
-        self.__listener = listener
-        self.__info = get_download(self.__client, f"{self.__listener.uid}")
         self.queued = queued
         self.seeding = seeding
+        self.listener = listener
+        self._info = None
         self.message = listener.message

-    def __update(self):
-        new_info = get_download(self.__client, f"{self.__listener.uid}")
-        if new_info is not None:
-            self.__info = new_info
+    def update(self):
+        self._info = get_download(f"{self.listener.mid}", self._info)

     def progress(self):
-        return f"{round(self.__info.progress*100, 2)}%"
+        return f"{round(self._info.progress * 100, 2)}%"

     def processed_bytes(self):
-        return get_readable_file_size(self.__info.downloaded)
+        return get_readable_file_size(self._info.downloaded)

     def speed(self):
-        return f"{get_readable_file_size(self.__info.dlspeed)}/s"
+        return f"{get_readable_file_size(self._info.dlspeed)}/s"

     def name(self):
-        if self.__info.state in ["metaDL", "checkingResumeData"]:
-            return f"[METADATA]{self.__info.name}"
-        return self.__info.name
+        if self._info.state in ["metaDL", "checkingResumeData"]:
+            return f"[METADATA]{self.listener.name}"
+        return self.listener.name

     def size(self):
-        return get_readable_file_size(self.__info.size)
+        return get_readable_file_size(self._info.size)

     def eta(self):
-        return get_readable_time(self.__info.eta)
+        return get_readable_time(self._info.eta)

     def status(self):
-        self.__update()
-        state = self.__info.state
+        self.update()
+        state = self._info.state
         if state == "queuedDL" or self.queued:
             return MirrorStatus.STATUS_QUEUEDL
         if state == "queuedUP":
@@ -64,64 +62,60 @@ def status(self):
             return MirrorStatus.STATUS_CHECKING
         if state in ["stalledUP", "uploading"] and self.seeding:
             return MirrorStatus.STATUS_SEEDING
-        return MirrorStatus.STATUS_DOWNLOADING
+        return MirrorStatus.STATUS_DOWNLOADING_Q

     def seeders_num(self):
-        return self.__info.num_seeds
+        return self._info.num_seeds

     def leechers_num(self):
-        return self.__info.num_leechs
+        return self._info.num_leechs

     def uploaded_bytes(self):
-        return get_readable_file_size(self.__info.uploaded)
+        return get_readable_file_size(self._info.uploaded)

-    def upload_speed(self):
-        return f"{get_readable_file_size(self.__info.upspeed)}/s"
+    def seed_speed(self):
+        return f"{get_readable_file_size(self._info.upspeed)}/s"

     def ratio(self):
-        return f"{round(self.__info.ratio, 3)}"
+        return f"{round(self._info.ratio, 3)}"

     def seeding_time(self):
-        return get_readable_time(self.__info.seeding_time, True)
+        return get_readable_time(self._info.seeding_time)

-    def download(self):
+    def task(self):
         return self

     def gid(self):
-        return self.hash()[:8]
+        return self.hash()[:12]

     def hash(self):
-        self.__update()
-        return self.__info.hash
+        return self._info.hash

-    def client(self):
-        return self.__client
-
-    def listener(self):
-        return self.__listener
-
-    async def cancel_download(self):
-        self.__update()
+    async def cancel_task(self):
+        self.listener.isCancelled = True
+        await sync_to_async(self.update)
         await sync_to_async(
-            self.__client.torrents_pause, torrent_hashes=self.__info.hash
+            xnox_client.torrents_pause, torrent_hashes=self._info.hash
         )
         if not self.seeding:
             if self.queued:
                 LOGGER.info(f"Cancelling QueueDL: {self.name()}")
                 msg = "task have been removed from queue/download"
             else:
-                LOGGER.info(f"Cancelling Download: {self.__info.name}")
+                LOGGER.info(f"Cancelling Download: {self._info.name}")
                 msg = "Download stopped by user!"
             await sleep(0.3)
-            await sync_to_async(
-                self.__client.torrents_delete,
-                torrent_hashes=self.__info.hash,
-                delete_files=True,
-            )
-            await sync_to_async(
-                self.__client.torrents_delete_tags, tags=self.__info.tags
+            await gather(
+                self.listener.onDownloadError(msg),
+                sync_to_async(
+                    xnox_client.torrents_delete,
+                    torrent_hashes=self._info.hash,
+                    delete_files=True,
+                ),
+                sync_to_async(
+                    xnox_client.torrents_delete_tags, tags=self._info.tags
+                ),
             )
             async with qb_listener_lock:
-                if self.__info.tags in QbTorrents:
-                    del QbTorrents[self.__info.tags]
-            await self.__listener.onDownloadError(msg)
+                if self._info.tags in QbTorrents:
+                    del QbTorrents[self._info.tags]
diff --git a/bot/helper/mirror_leech_utils/status_utils/queue_status.py b/bot/helper/mirror_leech_utils/status_utils/queue_status.py
index 2994b806c..0f8d7aef7 100644
--- a/bot/helper/mirror_leech_utils/status_utils/queue_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/queue_status.py
@@ -1,27 +1,26 @@
 from bot import LOGGER
-from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size


 class QueueStatus:
-    def __init__(self, name, size, gid, listener, status):
-        self.__name = name
-        self.__size = size
-        self.__gid = gid
-        self.__listener = listener
-        self.__status = status
+    def __init__(self, listener, gid, status):
+        self.listener = listener
+        self._size = self.listener.size
+        self._gid = gid
+        self._status = status
         self.message = listener.message

     def gid(self):
-        return self.__gid
+        return self._gid

     def name(self):
-        return self.__name
+        return self.listener.name

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def status(self):
-        if self.__status == "dl":
+        if self._status == "dl":
             return MirrorStatus.STATUS_QUEUEDL
         return MirrorStatus.STATUS_QUEUEUP

@@ -37,16 +36,17 @@ def speed(self):
     def eta(self):
         return "-"

-    def download(self):
+    def task(self):
         return self

-    async def cancel_download(self):
-        LOGGER.info(f"Cancelling Queue{self.__status}: {self.__name}")
-        if self.__status == "dl":
-            await self.__listener.onDownloadError(
+    async def cancel_task(self):
+        self.listener.isCancelled = True
+        LOGGER.info(f"Cancelling Queue{self._status}: {self.listener.name}")
+        if self._status == "dl":
+            await self.listener.onDownloadError(
                 "task have been removed from queue/download"
             )
         else:
-            await self.__listener.onUploadError(
+            await self.listener.onUploadError(
                 "task have been removed from queue/upload"
             )
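Taken together, these status classes satisfy one implicit interface that the status pages and cancel handlers rely on. A typing.Protocol sketch of it, as an illustration inferred from this diff (no such Protocol is defined in the codebase):

from typing import Protocol

class TaskStatus(Protocol):
    def gid(self) -> str: ...
    def name(self) -> str: ...
    def size(self) -> str: ...
    def status(self) -> str: ...
    def task(self): ...                       # renamed from download() across this patch
    async def cancel_task(self) -> None: ...  # renamed from cancel_download()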
diff --git a/bot/helper/mirror_leech_utils/status_utils/rclone_status.py b/bot/helper/mirror_leech_utils/status_utils/rclone_status.py
index 07c4aae5a..12b462ff6 100644
--- a/bot/helper/mirror_leech_utils/status_utils/rclone_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/rclone_status.py
@@ -1,40 +1,41 @@
-from bot.helper.ext_utils.bot_utils import MirrorStatus
+from bot.helper.ext_utils.status_utils import MirrorStatus


 class RcloneStatus:
-    def __init__(self, obj, message, gid, status):
-        self.__obj = obj
-        self.__gid = gid
-        self.__status = status
-        self.message = message
+    def __init__(self, listener, obj, gid, status):
+        self._obj = obj
+        self._gid = gid
+        self._status = status
+        self.listener = listener
+        self.message = listener.message

     def gid(self):
-        return self.__gid
+        return self._gid

     def progress(self):
-        return self.__obj.percentage
+        return self._obj.percentage

     def speed(self):
-        return self.__obj.speed
+        return self._obj.speed

     def name(self):
-        return self.__obj.name
+        return self.listener.name

     def size(self):
-        return self.__obj.size
+        return self._obj.size

     def eta(self):
-        return self.__obj.eta
+        return self._obj.eta

     def status(self):
-        if self.__status == "dl":
-            return MirrorStatus.STATUS_DOWNLOADING
-        if self.__status == "up":
-            return MirrorStatus.STATUS_UPLOADING
-        return MirrorStatus.STATUS_CLONING
+        if self._status == "dl":
+            return MirrorStatus.STATUS_DOWNLOADING_RC
+        if self._status == "up":
+            return MirrorStatus.STATUS_UPLOADING_RC
+        return MirrorStatus.STATUS_CLONING_RC

     def processed_bytes(self):
-        return self.__obj.transferred_size
+        return self._obj.transferred_size

-    def download(self):
-        return self.__obj
+    def task(self):
+        return self._obj
diff --git a/bot/helper/mirror_leech_utils/status_utils/sample_video_status.py b/bot/helper/mirror_leech_utils/status_utils/sample_video_status.py
new file mode 100644
index 000000000..2ce71b19d
--- /dev/null
+++ b/bot/helper/mirror_leech_utils/status_utils/sample_video_status.py
@@ -0,0 +1,35 @@
+from bot import LOGGER
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size
+
+
+class SampleVideoStatus:
+    def __init__(self, listener, gid):
+        self.listener = listener
+        self._gid = gid
+        self._size = self.listener.size
+        self.message = listener.message
+
+    def gid(self):
+        return self._gid
+
+    def name(self):
+        return self.listener.name
+
+    def size(self):
+        return get_readable_file_size(self._size)
+
+    def status(self):
+        return MirrorStatus.STATUS_SAMVID
+
+    def task(self):
+        return self
+
+    async def cancel_task(self):
+        LOGGER.info(f"Cancelling Sample Video: {self.listener.name}")
+        self.listener.isCancelled = True
+        if (
+            self.listener.suproc is not None
+            and self.listener.suproc.returncode is None
+        ):
+            self.listener.suproc.kill()
+        await self.listener.onUploadError("Creating sample video stopped by user!")
diff --git a/bot/helper/mirror_leech_utils/status_utils/split_status.py b/bot/helper/mirror_leech_utils/status_utils/split_status.py
index 3f465b281..d150257bd 100644
--- a/bot/helper/mirror_leech_utils/status_utils/split_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/split_status.py
@@ -1,46 +1,36 @@
-from bot import LOGGER
-from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size
+from bot import LOGGER, subprocess_lock
+from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size


 class SplitStatus:
-    def __init__(self, name, size, gid, listener):
-        self.__name = name
-        self.__gid = gid
-        self.__size = size
-        self.__listener = listener
+    def __init__(self, listener, gid):
+        self.listener = listener
+        self._gid = gid
+        self._size = self.listener.size
         self.message = listener.message

     def gid(self):
-        return self.__gid
-
-    def progress(self):
-        return "0"
-
-    def speed(self):
-        return "0"
+        return self._gid

     def name(self):
-        return self.__name
+        return self.listener.name

     def size(self):
-        return get_readable_file_size(self.__size)
-
-    def eta(self):
-        return "0s"
+        return get_readable_file_size(self._size)

     def status(self):
         return MirrorStatus.STATUS_SPLITTING

-    def processed_bytes(self):
-        return 0
-
-    def download(self):
+    def task(self):
         return self

-    async def cancel_download(self):
-        LOGGER.info(f"Cancelling Split: {self.__name}")
-        if self.__listener.suproc is not None:
-            self.__listener.suproc.kill()
-        else:
-            self.__listener.suproc = "cancelled"
-        await self.__listener.onUploadError("splitting stopped by user!")
+    async def cancel_task(self):
+        LOGGER.info(f"Cancelling Split: {self.listener.name}")
+        self.listener.isCancelled = True
+        async with subprocess_lock:
+            if (
+                self.listener.suproc is not None
+                and self.listener.suproc.returncode is None
+            ):
+                self.listener.suproc.kill()
+        await self.listener.onUploadError("splitting stopped by user!")
diff --git a/bot/helper/mirror_leech_utils/status_utils/telegram_status.py b/bot/helper/mirror_leech_utils/status_utils/telegram_status.py
index af0ca55c8..e02f2f226 100644
--- a/bot/helper/mirror_leech_utils/status_utils/telegram_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/telegram_status.py
@@ -1,4 +1,4 @@
-from bot.helper.ext_utils.bot_utils import (
+from bot.helper.ext_utils.status_utils import (
     MirrorStatus,
     get_readable_time,
     get_readable_file_size,
@@ -6,48 +6,47 @@


 class TelegramStatus:
-    def __init__(self, obj, size, message, gid, status):
-        self.__obj = obj
-        self.__size = size
-        self.__gid = gid
-        self.__status = status
-        self.message = message
+    def __init__(self, listener, obj, gid, status):
+        self.listener = listener
+        self._obj = obj
+        self._size = self.listener.size
+        self._gid = gid
+        self._status = status
+        self.message = listener.message

     def processed_bytes(self):
-        return get_readable_file_size(self.__obj.processed_bytes)
+        return get_readable_file_size(self._obj.processed_bytes)

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def status(self):
-        if self.__status == "up":
-            if self.__obj.processed_bytes == 0:
-                return MirrorStatus.STATUS_PROCESSING
-            return MirrorStatus.STATUS_UPLOADING
-        return MirrorStatus.STATUS_DOWNLOADING
+        if self._status == "up":
+            return MirrorStatus.STATUS_UPLOADING_TG
+        return MirrorStatus.STATUS_DOWNLOADING_TG

     def name(self):
-        return self.__obj.name
+        return self.listener.name

     def progress(self):
         try:
-            progress_raw = self.__obj.processed_bytes / self.__size * 100
+            progress_raw = self._obj.processed_bytes / self._size * 100
         except Exception:
             progress_raw = 0
         return f"{round(progress_raw, 2)}%"

     def speed(self):
-        return f"{get_readable_file_size(self.__obj.speed)}/s"
+        return f"{get_readable_file_size(self._obj.speed)}/s"

     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed
+            seconds = (self._size - self._obj.processed_bytes) / self._obj.speed
             return get_readable_time(seconds)
         except Exception:
             return "-"

-    def gid(self) -> str:
-        return self.__gid
+    def gid(self):
+        return self._gid

-    def download(self):
-        return self.__obj
+    def task(self):
+        return self._obj
import ( + MirrorStatus, + get_readable_time, + get_readable_file_size, +) + + +class YtDlpDownloadStatus: + def __init__(self, listener, obj, gid): + self._obj = obj + self._gid = gid + self.listener = listener + self._proccessed_bytes = 0 + self.message = listener.message + + def gid(self): + return self._gid + + def processed_bytes(self): + return get_readable_file_size(self._proccessed_bytes) + + async def processed_raw(self): + if self._obj.downloaded_bytes != 0: + self._proccessed_bytes = self._obj.downloaded_bytes + else: + self._proccessed_bytes = await get_path_size(self.listener.dir) + + def size(self): + return get_readable_file_size(self._obj.size) + + def status(self): + return MirrorStatus.STATUS_DOWNLOADING_YT + + def name(self): + return self.listener.name + + async def progress(self): + await self.processed_raw() + return f"{round(self._obj.progress, 2)}%" + + def speed(self): + return f"{get_readable_file_size(self._obj.download_speed)}/s" + + def eta(self): + if self._obj.eta != "-": + return get_readable_time(self._obj.eta) + try: + seconds = ( + self._obj.size - self._proccessed_bytes + ) / self._obj.download_speed + return get_readable_time(seconds) + except Exception: + return "-" + + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py b/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py deleted file mode 100644 index 100ad65cc..000000000 --- a/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py +++ /dev/null @@ -1,55 +0,0 @@ -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - async_to_sync, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import get_path_size - - -class YtDlpDownloadStatus: - def __init__(self, obj, listener, gid): - self.__obj = obj - self.__listener = listener - self.__gid = gid - self.message = listener.message - - def gid(self): - return self.__gid - - def processed_bytes(self): - return get_readable_file_size(self.processed_raw()) - - def processed_raw(self): - if self.__obj.downloaded_bytes != 0: - return self.__obj.downloaded_bytes - return async_to_sync(get_path_size, self.__listener.dir) - - def size(self): - return get_readable_file_size(self.__obj.size) - - def status(self): - return MirrorStatus.STATUS_DOWNLOADING - - def name(self): - return self.__obj.name - - def progress(self): - return f"{round(self.__obj.progress, 2)}%" - - def speed(self): - return f"{get_readable_file_size(self.__obj.download_speed)}/s" - - def eta(self): - if self.__obj.eta != "-": - return get_readable_time(self.__obj.eta) - try: - seconds = ( - self.__obj.size - self.processed_raw() - ) / self.__obj.download_speed - return get_readable_time(seconds) - except Exception: - return "-" - - def download(self): - return self.__obj diff --git a/bot/helper/mirror_leech_utils/status_utils/zip_status.py b/bot/helper/mirror_leech_utils/status_utils/zip_status.py index dde5f11b6..c8d9b45e5 100644 --- a/bot/helper/mirror_leech_utils/status_utils/zip_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/zip_status.py @@ -1,52 +1,51 @@ from time import time -from bot import LOGGER -from bot.helper.ext_utils.bot_utils import ( +from bot import LOGGER, subprocess_lock +from bot.helper.ext_utils.files_utils import get_path_size +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - async_to_sync, get_readable_time, get_readable_file_size, ) -from bot.helper.ext_utils.files_utils import get_path_size class ZipStatus: - def __init__(self, name, 
size, gid, listener): - self.__name = name - self.__size = size - self.__gid = gid - self.__listener = listener - self.__uid = listener.uid - self.__start_time = time() + def __init__(self, listener, gid): + self.listener = listener + self._size = self.listener.size + self._gid = gid + self._start_time = time() + self._proccessed_bytes = 0 self.message = listener.message def gid(self): - return self.__gid + return self._gid def speed_raw(self): - return self.processed_raw() / (time() - self.__start_time) + return self._proccessed_bytes / (time() - self._start_time) - def progress_raw(self): + async def progress_raw(self): + await self.processed_raw() try: - return self.processed_raw() / self.__size * 100 + return self._proccessed_bytes / self._size * 100 except Exception: return 0 - def progress(self): - return f"{round(self.progress_raw(), 2)}%" + async def progress(self): + return f"{round(await self.progress_raw(), 2)}%" def speed(self): return f"{get_readable_file_size(self.speed_raw())}/s" def name(self): - return self.__name + return self.listener.name def size(self): - return get_readable_file_size(self.__size) + return get_readable_file_size(self._size) def eta(self): try: - seconds = (self.__size - self.processed_raw()) / self.speed_raw() + seconds = (self._size - self._proccessed_bytes) / self.speed_raw() return get_readable_time(seconds) except Exception: return "-" @@ -54,21 +53,27 @@ def eta(self): def status(self): return MirrorStatus.STATUS_ARCHIVING - def processed_raw(self): - if self.__listener.newDir: - return async_to_sync(get_path_size, self.__listener.newDir) - return async_to_sync(get_path_size, self.__listener.dir) - self.__size + async def processed_raw(self): + if self.listener.newDir: + self._proccessed_bytes = await get_path_size(self.listener.newDir) + else: + self._proccessed_bytes = ( + await get_path_size(self.listener.dir) - self._size + ) def processed_bytes(self): - return get_readable_file_size(self.processed_raw()) + return get_readable_file_size(self._proccessed_bytes) - def download(self): + def task(self): return self - async def cancel_download(self): - LOGGER.info(f"Cancelling Archive: {self.__name}") - if self.__listener.suproc is not None: - self.__listener.suproc.kill() - else: - self.__listener.suproc = "cancelled" - await self.__listener.onUploadError("archiving stopped by user!") + async def cancel_task(self): + LOGGER.info(f"Cancelling Archive: {self.listener.name}") + self.listener.isCancelled = True + async with subprocess_lock: + if ( + self.listener.suproc is not None + and self.listener.suproc.returncode is None + ): + self.listener.suproc.kill() + await self.listener.onUploadError("archiving stopped by user!") diff --git a/bot/helper/mirror_leech_utils/telegram_uploader.py b/bot/helper/mirror_leech_utils/telegram_uploader.py new file mode 100644 index 000000000..df3d91e94 --- /dev/null +++ b/bot/helper/mirror_leech_utils/telegram_uploader.py @@ -0,0 +1,493 @@ +from os import path as ospath +from os import walk +from re import sub as re_sub +from re import match as re_match +from time import time +from asyncio import sleep +from logging import getLogger +from contextlib import suppress + +from PIL import Image +from natsort import natsorted +from tenacity import ( + RetryError, + retry, + wait_exponential, + stop_after_attempt, + retry_if_exception_type, +) +from aioshutil import copy, rmtree +from aiofiles.os import ( + path as aiopath, +) +from aiofiles.os import ( + remove, + rename, + makedirs, +) +from pyrogram.types import 
InputMediaPhoto, InputMediaVideo, InputMediaDocument +from pyrogram.errors import RPCError, FloodWait + +from bot import bot, user, config_dict +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.files_utils import ( + is_archive, + get_base_name, + clean_unwanted, +) +from bot.helper.ext_utils.media_utils import ( + get_media_info, + get_audio_thumb, + create_thumbnail, + get_document_type, +) +from bot.helper.aeon_utils.caption_gen import generate_caption +from bot.helper.telegram_helper.message_utils import delete_message + +LOGGER = getLogger(__name__) + + +class TgUploader: + def __init__(self, listener, path): + self._last_uploaded = 0 + self._processed_bytes = 0 + self._listener = listener + self._path = path + self._start_time = time() + self._total_files = 0 + self._thumb = self._listener.thumb or f"Thumbnails/{listener.userId}.jpg" + self._msgs_dict = {} + self._corrupted = 0 + self._is_corrupted = False + self._media_dict = {"videos": {}, "documents": {}} + self._last_msg_in_group = False + self._up_path = "" + self._user_dump = "" + self._lprefix = "" + self._lcaption = "" + self._is_private = False + self._sent_msg = None + self._user_id = listener.userId + self._user_session = user + + async def _upload_progress(self, current, _): + if self._listener.isCancelled: + if self._user_session: + user.stop_transmission() + else: + self._listener.client.stop_transmission() + chunk_size = current - self._last_uploaded + self._last_uploaded = current + self._processed_bytes += chunk_size + + async def _user_settings(self): + self._user_dump = self._listener.userDict.get("user_dump") + self._lprefix = self._listener.userDict.get("lprefix") + self._lcaption = self._listener.userDict.get("lcaption") + if not await aiopath.exists(self._thumb): + self._thumb = None + + async def _msg_to_reply(self): + msg = "Task started" + self._listener.upDest = config_dict["LEECH_DUMP_CHAT"] + try: + if self._user_session: + self._sent_msg = await user.send_message( + chat_id=self._listener.upDest, + text=msg, + disable_web_page_preview=True, + disable_notification=True, + ) + else: + self._sent_msg = await self._listener.client.send_message( + chat_id=self._listener.upDest, + text=msg, + disable_web_page_preview=True, + disable_notification=True, + ) + self._is_private = self._sent_msg.chat.type.name == "PRIVATE" + except Exception as e: + await self._listener.onUploadError(str(e)) + return False + return True + + async def _prepare_file(self, file_, dirpath, delete_file): + if self._lcaption: + cap_mono = await generate_caption(file_, dirpath, self._lcaption) + if self._lprefix: + if not self._lcaption: + cap_mono = f"{self._lprefix}{file_}" + self._lprefix = re_sub("<.*?>", "", self._lprefix) + if ( + self._listener.seed + and not self._listener.newDir + and not dirpath.endswith("/splited_files_joya") + and not delete_file + ): + dirpath = f"{dirpath}/copied_joya" + await makedirs(dirpath, exist_ok=True) + new_path = ospath.join(dirpath, f"{self._lprefix} {file_}") + self._up_path = await copy(self._up_path, new_path) + else: + new_path = ospath.join(dirpath, f"{self._lprefix} {file_}") + await rename(self._up_path, new_path) + self._up_path = new_path + if not self._lcaption and not self._lprefix: + cap_mono = f"{file_}" + if len(file_) > 60: + if is_archive(file_): + name = get_base_name(file_) + ext = file_.split(name, 1)[1] + elif match := re_match( + r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+$)", file_ + ): + name = match.group(0) + ext = file_.split(name, 1)[1] + elif len(fsplit := ospath.splitext(file_)) > 1: + name = fsplit[0] + ext = fsplit[1] + else: + name = file_ + ext = "" + extn = len(ext) + remain = 60 - extn + name = name[:remain] + if ( + self._listener.seed + and not self._listener.newDir + and not dirpath.endswith("/splited_files_joya") + and not delete_file + ): + dirpath = f"{dirpath}/copied_joya" + await makedirs(dirpath, exist_ok=True) + new_path = ospath.join(dirpath, f"{name}{ext}") + self._up_path = await copy(self._up_path, new_path) + else: + new_path = ospath.join(dirpath, f"{name}{ext}") + await rename(self._up_path, new_path) + self._up_path = new_path + return cap_mono + + def _get_input_media(self, subkey, key): + rlist = [] + for msg in self._media_dict[key][subkey]: + if key == "videos": + input_media = InputMediaVideo( + media=msg.video.file_id, caption=msg.caption + ) + else: + input_media = InputMediaDocument( + media=msg.document.file_id, caption=msg.caption + ) + rlist.append(input_media) + return rlist + + async def _send_screenshots(self, dirpath, outputs): + inputs = [ + InputMediaPhoto(ospath.join(dirpath, p), p.rsplit("/", 1)[-1]) + for p in outputs + ] + send_ss = await self._sent_msg.reply_media_group( + media=inputs, + quote=True, + disable_notification=True, + ) + await bot.copy_media_group(self._user_id, send_ss[0].chat.id, send_ss[0].id) + self._sent_msg = send_ss[-1] + + async def _send_media_group(self, subkey, key, msgs): + for index, msg in enumerate(msgs): + if not self._user_session: + msgs[index] = await self._listener.client.get_messages( + chat_id=msg[0], message_ids=msg[1] + ) + else: + msgs[index] = await user.get_messages( + chat_id=msg[0], message_ids=msg[1] + ) + msgs_list = await msgs[0].reply_to_message.reply_media_group( + media=self._get_input_media(subkey, key), + quote=True, + disable_notification=True, + ) + for msg in msgs: + if msg.link in self._msgs_dict: + del self._msgs_dict[msg.link] + await delete_message(msg) + del self._media_dict[key][subkey] + if self._listener.isSuperChat or self._listener.upDest: + for m in msgs_list: + self._msgs_dict[m.link] = m.caption + self._sent_msg = msgs_list[-1] + + async def upload(self, o_files, ft_delete): + await self._user_settings() + res = await self._msg_to_reply() + if not res: + return + for dirpath, _, files in natsorted(await sync_to_async(walk, self._path)): + if dirpath.endswith("/yt-dlp-thumb"): + continue + if dirpath.endswith("_joyass"): + await self._send_screenshots(dirpath, files) + await rmtree(dirpath, ignore_errors=True) + continue + for file_ in natsorted(files): + delete_file = False + self._up_path = f_path = ospath.join(dirpath, file_) + if self._up_path in ft_delete: + delete_file = True + if self._up_path in o_files: + continue + if file_.lower().endswith(tuple(self._listener.extensionFilter)): + if not self._listener.seed or self._listener.newDir: + await remove(self._up_path) + continue + try: + f_size = await aiopath.getsize(self._up_path) + self._total_files += 1 + if f_size == 0: + LOGGER.error( + f"{self._up_path} size is zero, telegram don't upload zero size files" + ) + self._corrupted += 1 + continue + if self._listener.isCancelled: + return + cap_mono = await self._prepare_file(file_, dirpath, delete_file) + if self._last_msg_in_group: + group_lists = [ + x
for v in self._media_dict.values() for x in v + ] + match = re_match( + r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", f_path + ) + if not match or match and match.group(0) not in group_lists: + for key, value in list(self._media_dict.items()): + for subkey, msgs in list(value.items()): + if len(msgs) > 1: + await self._send_media_group( + subkey, key, msgs + ) + self._last_msg_in_group = False + self._last_uploaded = 0 + await self._upload_file(cap_mono, file_, f_path) + if self._listener.isCancelled: + return + if ( + not self._is_corrupted + and (self._listener.isSuperChat or self._listener.upDest) + and not self._is_private + ): + self._msgs_dict[self._sent_msg.link] = file_ + await sleep(1) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info( + f"Total Attempts: {err.last_attempt.attempt_number}" + ) + err = err.last_attempt.exception() + LOGGER.error(f"{err}. Path: {self._up_path}") + self._corrupted += 1 + if self._listener.isCancelled: + return + continue + finally: + if ( + not self._listener.isCancelled + and await aiopath.exists(self._up_path) + and ( + not self._listener.seed + or self._listener.newDir + or dirpath.endswith("/splited_files_joya") + or "/copied_joya/" in self._up_path + or delete_file + ) + ): + await remove(self._up_path) + for key, value in list(self._media_dict.items()): + for subkey, msgs in list(value.items()): + if len(msgs) > 1: + try: + await self._send_media_group(subkey, key, msgs) + except Exception as e: + LOGGER.info( + f"While sending media group at the end of task. Error: {e}" + ) + if self._listener.isCancelled: + return + if self._listener.seed and not self._listener.newDir: + await clean_unwanted(self._path) + if self._total_files == 0: + await self._listener.onUploadError( + "No files to upload. In case you have filled EXTENSION_FILTER, then check if all files have those extensions or not." + ) + return + if self._total_files <= self._corrupted: + await self._listener.onUploadError( + "Files Corrupted or unable to upload. Check logs!" 
+ ) + return + await self._listener.onUploadComplete( + None, self._msgs_dict, self._total_files, self._corrupted + ) + + @retry( + wait=wait_exponential(multiplier=2, min=4, max=8), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + async def _upload_file(self, cap_mono, file, o_path, force_document=False): + if self._thumb and not await aiopath.exists(self._thumb): + self._thumb = None + thumb = self._thumb + self._is_corrupted = False + isDoc = False + + try: + is_video, is_audio, is_image = await get_document_type(self._up_path) + + if not is_image and not thumb: + file_name = ospath.splitext(file)[0] + thumb_path = f"{self._path}/yt-dlp-thumb/{file_name}.jpg" + if await aiopath.isfile(thumb_path): + thumb = thumb_path + elif is_audio and not is_video: + thumb = await get_audio_thumb(self._up_path) + + if ( + self._listener.asDoc + or force_document + or (not is_video and not is_audio and not is_image) + ): + isDoc = True + await self._upload_document(cap_mono, thumb, is_video) + elif is_video: + await self._upload_video(cap_mono, thumb) + elif is_audio: + await self._upload_audio(cap_mono, thumb) + else: + await self._upload_photo(cap_mono, thumb) + + if not self._thumb and thumb and await aiopath.exists(thumb): + await remove(thumb) + + except FloodWait as f: + LOGGER.warning(str(f)) + await sleep(f.value * 1.3) + if not self._thumb and thumb and await aiopath.exists(thumb): + await remove(thumb) + return await self._upload_file(cap_mono, file, o_path) + except Exception as err: + if not self._thumb and thumb and await aiopath.exists(thumb): + await remove(thumb) + err_type = "RPCError: " if isinstance(err, RPCError) else "" + LOGGER.error(f"{err_type}{err}. Path: {self._up_path}") + if "Telegram says: [400" in str(err) and not isDoc: + LOGGER.error(f"Retrying As Document. Path: {self._up_path}") + return await self._upload_file(cap_mono, file, o_path, True) + raise err + + async def _upload_document(self, cap_mono, thumb, is_video): + if is_video and not thumb: + thumb = await create_thumbnail(self._up_path, None) + if self._listener.isCancelled: + return + self._sent_msg = await self._sent_msg.reply_document( + document=self._up_path, + quote=True, + thumb=thumb, + caption=cap_mono, + force_document=True, + disable_notification=True, + progress=self._upload_progress, + ) + await self._copy_message(cap_mono) + + async def _upload_video(self, cap_mono, thumb): + duration = (await get_media_info(self._up_path))[0] + if not thumb: + thumb = await create_thumbnail(self._up_path, duration) + width, height = self._get_image_dimensions(thumb) + if self._listener.isCancelled: + return + self._sent_msg = await self._sent_msg.reply_video( + video=self._up_path, + quote=True, + caption=cap_mono, + duration=duration, + width=width, + height=height, + thumb=thumb, + supports_streaming=True, + disable_notification=True, + progress=self._upload_progress, + ) + await self._copy_message(cap_mono) + + async def _upload_audio(self, cap_mono, thumb): + duration, artist, title = await get_media_info(self._up_path) + if self._listener.isCancelled: + return + self._sent_msg = await self._sent_msg.reply_audio( + audio=self._up_path, + quote=True, + caption=cap_mono, + duration=duration, + performer=artist, + title=title, + thumb=thumb, + disable_notification=True, + progress=self._upload_progress, + ) + await self._copy_message(cap_mono) + + async def _upload_photo(self, cap_mono, thumb): + if self._listener.isCancelled: + return + self._sent_msg = await self._sent_msg.reply_photo( + photo=self._up_path, + quote=True, + caption=cap_mono, + disable_notification=True, + progress=self._upload_progress, + ) + await self._copy_message(cap_mono) + + async def _copy_message(self, cap_mono): + with suppress(Exception): + await bot.copy_message( + self._user_id, self._sent_msg.chat.id, self._sent_msg.id, cap_mono + ) + if self._user_dump: + with suppress(Exception): + await bot.copy_message( + self._user_dump, + self._sent_msg.chat.id, + self._sent_msg.id, + cap_mono, + ) + + def _get_image_dimensions(self, thumb): + if thumb: + with Image.open(thumb) as img: + width, height = img.size + else: + width = 480 + height = 320 + return width, height + + @property + def speed(self): + try: + return self._processed_bytes / (time() - self._start_time) + except Exception: + return 0 + + @property + def processed_bytes(self): + return self._processed_bytes + + async def cancel_task(self): + self._listener.isCancelled = True + LOGGER.info(f"Cancelling Upload: {self._listener.name}") + await self._listener.onUploadError("Your upload has been stopped!") diff --git a/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py b/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py deleted file mode 100644 index 0e999f1dc..000000000 --- a/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py +++ /dev/null @@ -1,971 +0,0 @@ -import contextlib -from io import FileIO -from os import path as ospath -from os import remove as osremove -from os import listdir, makedirs -from re import search as re_search -from time import time -from pickle import load as pload -from random import randrange -from logging import ERROR, getLogger -from urllib.parse import quote as rquote -from urllib.parse import parse_qs, urlparse - -from tenacity import ( - RetryError, - retry, - wait_exponential, - stop_after_attempt, -
retry_if_exception_type, -) -from google.oauth2 import service_account -from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload -from googleapiclient.errors import HttpError -from googleapiclient.discovery import build - -from bot import GLOBAL_EXTENSION_FILTER, config_dict, list_drives_dict -from bot.helper.aeon_utils.metadata import add_attachment -from bot.helper.ext_utils.bot_utils import ( - SetInterval, - is_mkv, - async_to_sync, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import process_file, get_mime_type - -LOGGER = getLogger(__name__) -getLogger("googleapiclient.discovery").setLevel(ERROR) - - -class GoogleDriveHelper: - def __init__(self, name=None, path=None, listener=None): - self.__OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] - self.__G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder" - self.__G_DRIVE_BASE_DOWNLOAD_URL = ( - "https://drive.google.com/uc?id={}&export=download" - ) - self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL = ( - "https://drive.google.com/drive/folders/{}" - ) - self.__listener = listener - self.__user_id = listener.message.from_user.id if listener else None - self.__path = path - self.__total_bytes = 0 - self.__total_files = 0 - self.__total_folders = 0 - self.__processed_bytes = 0 - self.__total_time = 0 - self.__start_time = 0 - self.__alt_auth = False - self.__is_uploading = False - self.__is_downloading = False - self.__is_cloning = False - self.__is_cancelled = False - self.__is_errored = False - self.__status = None - self.__updater = None - self.__update_interval = 3 - self.__sa_index = 0 - self.__sa_count = 1 - self.__sa_number = 100 - self.__service = self.__authorize() - self.__file_processed_bytes = 0 - self.__processed_bytes = 0 - self.name = name - - @property - def speed(self): - try: - return self.__processed_bytes / self.__total_time - except Exception: - return 0 - - @property - def processed_bytes(self): - return self.__processed_bytes - - def __authorize(self): - credentials = None - if config_dict["USE_SERVICE_ACCOUNTS"]: - json_files = listdir("accounts") - self.__sa_number = len(json_files) - self.__sa_index = randrange(self.__sa_number) - LOGGER.info( - f"Authorizing with {json_files[self.__sa_index]} service account" - ) - credentials = service_account.Credentials.from_service_account_file( - f"accounts/{json_files[self.__sa_index]}", scopes=self.__OAUTH_SCOPE - ) - elif ospath.exists("token.pickle"): - LOGGER.info("Authorize with token.pickle") - with open("token.pickle", "rb") as f: - credentials = pload(f) - else: - LOGGER.error("token.pickle not found!") - return build("drive", "v3", credentials=credentials, cache_discovery=False) - - def __alt_authorize(self): - if not self.__alt_auth: - self.__alt_auth = True - if ospath.exists("token.pickle"): - LOGGER.info("Authorize with token.pickle") - with open("token.pickle", "rb") as f: - credentials = pload(f) - return build( - "drive", "v3", credentials=credentials, cache_discovery=False - ) - LOGGER.error("token.pickle not found!") - return None - - def __switchServiceAccount(self): - if self.__sa_index == self.__sa_number - 1: - self.__sa_index = 0 - else: - self.__sa_index += 1 - self.__sa_count += 1 - LOGGER.info(f"Switching to {self.__sa_index} index") - self.__service = self.__authorize() - - @staticmethod - def getIdFromUrl(link): - if "folders" in link or "file" in link: - regex = r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)" - res = re_search(regex, link) - if res is None: - raise 
IndexError("G-Drive ID not found.") - return res.group(3) - parsed = urlparse(link) - return parse_qs(parsed.query)["id"][0] - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def getFolderData(self, file_id): - try: - meta = ( - self.__service.files() - .get(fileId=file_id, supportsAllDrives=True) - .execute() - ) - if meta.get("mimeType", "") == self.__G_DRIVE_DIR_MIME_TYPE: - return meta.get("name") - except Exception: - return None - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __getFileMetadata(self, file_id): - return ( - self.__service.files() - .get( - fileId=file_id, - supportsAllDrives=True, - fields="name, id, mimeType, size", - ) - .execute() - ) - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def getFilesByFolderId(self, folder_id): - page_token = None - files = [] - while True: - response = ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - q=f"'{folder_id}' in parents and trashed = false", - spaces="drive", - pageSize=200, - fields="nextPageToken, files(id, name, mimeType, size, shortcutDetails)", - orderBy="folder, name", - pageToken=page_token, - ) - .execute() - ) - files.extend(response.get("files", [])) - page_token = response.get("nextPageToken") - if page_token is None: - break - return files - - async def __progress(self): - if self.__status is not None: - chunk_size = ( - self.__status.total_size * self.__status.progress() - - self.__file_processed_bytes - ) - self.__file_processed_bytes = ( - self.__status.total_size * self.__status.progress() - ) - self.__processed_bytes += chunk_size - self.__total_time += self.__update_interval - - def deletefile(self, link: str): - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return "Google Drive ID could not be found in the provided link" - msg = "" - try: - self.__service.files().delete( - fileId=file_id, supportsAllDrives=True - ).execute() - msg = "Successfully deleted" - LOGGER.info(f"Delete Result: {msg}") - except HttpError as err: - if "File not found" in str(err) or "insufficientFilePermissions" in str( - err - ): - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.deletefile(link) - err = "File not found or insufficientFilePermissions!" - LOGGER.error(f"Delete Result: {err}") - msg = str(err) - return msg - - def upload(self, file_name, size, gdrive_id): - if not gdrive_id: - gdrive_id = config_dict["GDRIVE_ID"] - self.__is_uploading = True - item_path = f"{self.__path}/{file_name}" - LOGGER.info(f"Uploading: {item_path}") - self.__updater = SetInterval(self.__update_interval, self.__progress) - try: - if ospath.isfile(item_path): - if item_path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - raise Exception( - "This file extension is excluded by extension filter!" 
- ) - mime_type = get_mime_type(item_path) - link = self.__upload_file( - item_path, file_name, mime_type, gdrive_id, is_dir=False - ) - if self.__is_cancelled: - return - if link is None: - raise Exception("Upload has been manually cancelled") - LOGGER.info(f"Uploaded To G-Drive: {item_path}") - else: - mime_type = "Folder" - dir_id = self.__create_directory( - ospath.basename(ospath.abspath(file_name)), gdrive_id - ) - result = self.__upload_dir(item_path, dir_id) - if result is None: - raise Exception("Upload has been manually cancelled!") - link = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - if self.__is_cancelled: - return - LOGGER.info(f"Uploaded To G-Drive: {file_name}") - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - async_to_sync(self.__listener.onUploadError, err) - self.__is_errored = True - finally: - self.__updater.cancel() - if self.__is_cancelled and not self.__is_errored: - if mime_type == "Folder": - LOGGER.info("Deleting uploaded data from Drive...") - link = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - self.deletefile(link) - return - if self.__is_errored: - return - async_to_sync( - self.__listener.onUploadComplete, - link, - size, - self.__total_files, - self.__total_folders, - mime_type, - file_name, - ) - - def __upload_dir(self, input_directory, dest_id): - list_dirs = listdir(input_directory) - if len(list_dirs) == 0: - return dest_id - new_id = None - for item in list_dirs: - current_file_name = ospath.join(input_directory, item) - if ospath.isdir(current_file_name): - current_dir_id = self.__create_directory(item, dest_id) - new_id = self.__upload_dir(current_file_name, current_dir_id) - self.__total_folders += 1 - elif not item.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - mime_type = get_mime_type(current_file_name) - file_name = current_file_name.split("/")[-1] - self.__upload_file(current_file_name, file_name, mime_type, dest_id) - self.__total_files += 1 - new_id = dest_id - else: - osremove(current_file_name) - new_id = "filter" - if self.__is_cancelled: - break - return new_id - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __create_directory(self, directory_name, dest_id): - directory_name, _ = async_to_sync( - process_file, directory_name, self.__user_id, is_mirror=True - ) - file_metadata = { - "name": directory_name, - "description": "Uploaded by Aeon", - "mimeType": self.__G_DRIVE_DIR_MIME_TYPE, - } - if dest_id is not None: - file_metadata["parents"] = [dest_id] - file = ( - self.__service.files() - .create(body=file_metadata, supportsAllDrives=True) - .execute() - ) - file_id = file.get("id") - LOGGER.info( - f'Created G-Drive Folder:\nName: {file.get("name")}\nID: {file_id}' - ) - return file_id - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(Exception)), - ) - def __upload_file(self, file_path, file_name, mime_type, dest_id, is_dir=True): - location = ospath.dirname(file_path) - file_name, _ = async_to_sync( - process_file, file_name, self.__user_id, location, True - ) - if (atc := self.__listener.attachment) and is_mkv(file_name): - file_name = async_to_sync(add_attachment, file_name, location, atc) - file_metadata = { - "name": file_name, - "description": "Uploaded by Aeon", - 
"mimeType": mime_type, - } - if dest_id is not None: - file_metadata["parents"] = [dest_id] - - if ospath.getsize(file_path) == 0: - media_body = MediaFileUpload( - file_path, mimetype=mime_type, resumable=False - ) - response = ( - self.__service.files() - .create( - body=file_metadata, media_body=media_body, supportsAllDrives=True - ) - .execute() - ) - drive_file = ( - self.__service.files() - .get(fileId=response["id"], supportsAllDrives=True) - .execute() - ) - return self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) - media_body = MediaFileUpload( - file_path, - mimetype=mime_type, - resumable=True, - chunksize=100 * 1024 * 1024, - ) - - drive_file = self.__service.files().create( - body=file_metadata, media_body=media_body, supportsAllDrives=True - ) - response = None - retries = 0 - while response is None and not self.__is_cancelled: - try: - self.__status, response = drive_file.next_chunk() - except HttpError as err: - if err.resp.status in [500, 502, 503, 504] and retries < 10: - retries += 1 - continue - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "userRateLimitExceeded", - "dailyLimitExceeded", - ]: - raise err - if config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - LOGGER.info(f"Got: {reason}, Trying Again.") - return self.__upload_file( - file_path, file_name, mime_type, dest_id - ) - LOGGER.error(f"Got: {reason}") - raise err - if self.__is_cancelled: - return None - if not self.__listener.seed or self.__listener.newDir: - with contextlib.suppress(Exception): - osremove(file_path) - self.__file_processed_bytes = 0 - if not is_dir: - drive_file = ( - self.__service.files() - .get(fileId=response["id"], supportsAllDrives=True) - .execute() - ) - return self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) - return None - - def clone(self, link, gdrive_id): - if not gdrive_id: - gdrive_id = config_dict["GDRIVE_ID"] - self.__is_cloning = True - self.__start_time = time() - self.__total_files = 0 - self.__total_folders = 0 - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return "Google Drive ID could not be found in the provided link" - msg = "" - LOGGER.info(f"File ID: {file_id}") - try: - meta = self.__getFileMetadata(file_id) - mime_type = meta.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - dir_id = self.__create_directory(meta.get("name"), gdrive_id) - self.__cloneFolder( - meta.get("name"), meta.get("name"), meta.get("id"), dir_id - ) - durl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - if self.__is_cancelled: - LOGGER.info("Deleting cloned data from Drive...") - self.deletefile(durl) - return None, None, None, None, None - mime_type = "Folder" - size = self.__processed_bytes - else: - file = self.__copyFile(meta.get("id"), gdrive_id, meta.get("name")) - msg += f'Name:{file.get("name")}
' - durl = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) - if mime_type is None: - mime_type = "File" - size = int(meta.get("size", 0)) - return durl, size, mime_type, self.__total_files, self.__total_folders - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "User rate limit exceeded" in err: - msg = "User rate limit exceeded." - elif "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.clone(link) - msg = "File not found." - else: - msg = f"Error.\n{err}" - async_to_sync(self.__listener.onUploadError, msg) - return None, None, None, None, None - - def __cloneFolder(self, name, local_path, folder_id, dest_id): - LOGGER.info(f"Syncing: {local_path}") - files = self.getFilesByFolderId(folder_id) - if len(files) == 0: - return dest_id - for file in files: - if file.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: - self.__total_folders += 1 - file_path = ospath.join(local_path, file.get("name")) - current_dir_id = self.__create_directory(file.get("name"), dest_id) - self.__cloneFolder( - file.get("name"), file_path, file.get("id"), current_dir_id - ) - elif ( - not file.get("name").lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)) - ): - self.__total_files += 1 - self.__copyFile(file.get("id"), dest_id, file.get("name")) - self.__processed_bytes += int(file.get("size", 0)) - self.__total_time = int(time() - self.__start_time) - if self.__is_cancelled: - break - return None - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __copyFile(self, file_id, dest_id, file_name): - file_name, _ = async_to_sync( - process_file, file_name, self.__user_id, is_mirror=True - ) - body = {"name": file_name, "parents": [dest_id]} - try: - return ( - self.__service.files() - .copy(fileId=file_id, body=body, supportsAllDrives=True) - .execute() - ) - except HttpError as err: - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "userRateLimitExceeded", - "dailyLimitExceeded", - "cannotCopyFile", - ]: - raise err - if reason == "cannotCopyFile": - LOGGER.error(err) - elif config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - return self.__copyFile(file_id, dest_id, file_name) - else: - LOGGER.error(f"Got: {reason}") - raise err - - def __escapes(self, estr): - chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"] - for char in chars: - estr = estr.replace(char, f"\\{char}") - return estr.strip() - - def __get_recursive_list(self, file, rootid): - rtnlist = [] - if rootid == "root": - rootid = ( - self.__service.files() - .get(fileId="root", fields="id") - .execute() - .get("id") - ) - x = file.get("name") - y = file.get("id") - while y != rootid: - rtnlist.append(x) - file = ( - self.__service.files() - .get( - fileId=file.get("parents")[0], - supportsAllDrives=True, - fields="id, name, parents", - ) - .execute() - ) - x 
= file.get("name") - y = file.get("id") - rtnlist.reverse() - return rtnlist - - def __drive_query(self, dir_id, fileName, stopDup, isRecursive, itemType): - try: - if isRecursive: - if stopDup: - query = f"name = '{fileName}' and " - else: - fileName = fileName.split() - query = "".join( - f"name contains '{name}' and " - for name in fileName - if name != "" - ) - if itemType == "files": - query += ( - "mimeType != 'application/vnd.google-apps.folder' and " - ) - elif itemType == "folders": - query += ( - "mimeType = 'application/vnd.google-apps.folder' and " - ) - query += "trashed = false" - if dir_id == "root": - return ( - self.__service.files() - .list( - q=f"{query} and 'me' in owners", - pageSize=200, - spaces="drive", - fields="files(id, name, mimeType, size, parents)", - orderBy="folder, name asc", - ) - .execute() - ) - return ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - driveId=dir_id, - q=query, - spaces="drive", - pageSize=150, - fields="files(id, name, mimeType, size, teamDriveId, parents)", - corpora="drive", - orderBy="folder, name asc", - ) - .execute() - ) - if stopDup: - query = f"'{dir_id}' in parents and name = '{fileName}' and " - else: - query = f"'{dir_id}' in parents and " - fileName = fileName.split() - for name in fileName: - if name != "": - query += f"name contains '{name}' and " - if itemType == "files": - query += "mimeType != 'application/vnd.google-apps.folder' and " - elif itemType == "folders": - query += "mimeType = 'application/vnd.google-apps.folder' and " - query += "trashed = false" - return ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - q=query, - spaces="drive", - pageSize=150, - fields="files(id, name, mimeType, size)", - orderBy="folder, name asc", - ) - .execute() - ) - except Exception as err: - err = str(err).replace(">", "").replace("<", "") - LOGGER.error(err) - return {"files": []} - - def drive_list( - self, fileName, stopDup=False, noMulti=False, isRecursive=True, itemType="" - ): - msg = "" - fileName = self.__escapes(str(fileName)) - contents_no = 0 - telegraph_content = [] - Title = False - if len(list_drives_dict) > 1: - token_service = self.__alt_authorize() - if token_service is not None: - self.__service = token_service - for drive_name, drives_dict in list_drives_dict.items(): - dir_id = drives_dict["drive_id"] - index_url = drives_dict["index_link"] - isRecur = False if isRecursive and len(dir_id) > 23 else isRecursive - response = self.__drive_query( - dir_id, fileName, stopDup, isRecur, itemType - ) - if not response["files"]: - if noMulti: - break - continue - if not Title: - msg += f"Search Result For {fileName}
" - Title = True - if drive_name: - msg += f"╾────────────╼
{drive_name}
╾────────────╼
" - for file in response.get("files", []): - mime_type = file.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - furl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format( - file.get("id") - ) - msg += f"{file.get('name')}
(folder)
" - msg += f"Drive Link" - if index_url: - if isRecur: - url_path = "/".join( - [ - rquote(n, safe="") - for n in self.__get_recursive_list(file, dir_id) - ] - ) - else: - url_path = rquote(f'{file.get("name")}', safe="") - url = f"{index_url}/{url_path}/" - msg += f' Index Link' - elif mime_type == "application/vnd.google-apps.shortcut": - furl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format( - file.get("id") - ) - msg += f"⁍{file.get('name')} (shortcut)" - else: - furl = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) - msg += f"{file.get('name')}
({get_readable_file_size(int(file.get('size', 0)))})
" - msg += f"Drive Link" - if index_url: - if isRecur: - url_path = "/".join( - rquote(n, safe="") - for n in self.__get_recursive_list(file, dir_id) - ) - else: - url_path = rquote(f'{file.get("name")}') - url = f"{index_url}/{url_path}" - msg += f' Index Link' - msg += "
" - contents_no += 1 - if len(msg.encode("utf-8")) > 39000: - telegraph_content.append(msg) - msg = "" - if noMulti: - break - - if msg != "": - telegraph_content.append(msg) - - return telegraph_content, contents_no - - def count(self, link): - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return ( - "Google Drive ID could not be found in the provided link", - None, - None, - None, - None, - ) - LOGGER.info(f"File ID: {file_id}") - try: - return self.__proceed_count(file_id) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.count(link) - msg = "File not found." - else: - msg = f"Error.\n{err}" - return msg, None, None, None, None - - def __proceed_count(self, file_id): - meta = self.__getFileMetadata(file_id) - name = meta["name"] - LOGGER.info(f"Counting: {name}") - mime_type = meta.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__gDrive_directory(meta) - mime_type = "Folder" - else: - if mime_type is None: - mime_type = "File" - self.__total_files += 1 - self.__gDrive_file(meta) - return ( - name, - mime_type, - self.__total_bytes, - self.__total_files, - self.__total_folders, - ) - - def __gDrive_file(self, filee): - size = int(filee.get("size", 0)) - self.__total_bytes += size - - def __gDrive_directory(self, drive_folder): - files = self.getFilesByFolderId(drive_folder["id"]) - if len(files) == 0: - return - for filee in files: - shortcut_details = filee.get("shortcutDetails") - if shortcut_details is not None: - mime_type = shortcut_details["targetMimeType"] - file_id = shortcut_details["targetId"] - filee = self.__getFileMetadata(file_id) - else: - mime_type = filee.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__total_folders += 1 - self.__gDrive_directory(filee) - else: - self.__total_files += 1 - self.__gDrive_file(filee) - - def download(self, link): - self.__is_downloading = True - file_id = self.getIdFromUrl(link) - self.__updater = SetInterval(self.__update_interval, self.__progress) - try: - meta = self.__getFileMetadata(file_id) - if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: - self.__download_folder(file_id, self.__path, self.name) - else: - makedirs(self.__path, exist_ok=True) - self.__download_file( - file_id, self.__path, self.name, meta.get("mimeType") - ) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "downloadQuotaExceeded" in err: - err = "Download Quota Exceeded." - elif "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - self.__updater.cancel() - return self.download(link) - err = "File not found!" 
- async_to_sync(self.__listener.onDownloadError, err) - self.__is_cancelled = True - finally: - self.__updater.cancel() - if self.__is_cancelled: - return None - async_to_sync(self.__listener.on_download_complete) - - def __download_folder(self, folder_id, path, folder_name): - folder_name = folder_name.replace("/", "") - if not ospath.exists(f"{path}/{folder_name}"): - makedirs(f"{path}/{folder_name}") - path += f"/{folder_name}" - result = self.getFilesByFolderId(folder_id) - if len(result) == 0: - return - result = sorted(result, key=lambda k: k["name"]) - for item in result: - file_id = item["id"] - filename = item["name"] - shortcut_details = item.get("shortcutDetails") - if shortcut_details is not None: - file_id = shortcut_details["targetId"] - mime_type = shortcut_details["targetMimeType"] - else: - mime_type = item.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__download_folder(file_id, path, filename) - elif not ospath.isfile( - f"{path}{filename}" - ) and not filename.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - self.__download_file(file_id, path, filename, mime_type) - if self.__is_cancelled: - break - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(Exception)), - ) - def __download_file(self, file_id, path, filename, mime_type): - request = self.__service.files().get_media( - fileId=file_id, supportsAllDrives=True - ) - filename = filename.replace("/", "") - if len(filename.encode()) > 255: - ext = ospath.splitext(filename)[1] - filename = f"{filename[:245]}{ext}" - if self.name.endswith(ext): - self.name = filename - if self.__is_cancelled: - return None - fh = FileIO(f"{path}/{filename}", "wb") - downloader = MediaIoBaseDownload(fh, request, chunksize=100 * 1024 * 1024) - done = False - retries = 0 - while not done: - if self.__is_cancelled: - fh.close() - break - try: - self.__status, done = downloader.next_chunk() - except HttpError as err: - if err.resp.status in [500, 502, 503, 504] and retries < 10: - retries += 1 - continue - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "downloadQuotaExceeded", - "dailyLimitExceeded", - ]: - raise err - if config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - LOGGER.info(f"Got: {reason}, Trying Again...") - return self.__download_file( - file_id, path, filename, mime_type - ) - LOGGER.error(f"Got: {reason}") - raise err - self.__file_processed_bytes = 0 - return None - - async def cancel_download(self): - self.__is_cancelled = True - if self.__is_downloading: - LOGGER.info(f"Cancelling Download: {self.name}") - await self.__listener.onDownloadError("Download stopped by user!") - elif self.__is_cloning: - LOGGER.info(f"Cancelling Clone: {self.name}") - await self.__listener.onUploadError( - "your clone has been stopped and cloned data has been deleted!" 
- ) - elif self.__is_uploading: - LOGGER.info(f"Cancelling Upload: {self.name}") - await self.__listener.onUploadError("Cancelled by user!") diff --git a/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py b/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py deleted file mode 100644 index 982ff2a56..000000000 --- a/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py +++ /dev/null @@ -1,688 +0,0 @@ -import contextlib -from os import path as ospath -from os import walk -from re import match as re_match -from time import time -from asyncio import sleep -from logging import ERROR, getLogger -from traceback import format_exc - -from PIL import Image -from natsort import natsorted -from tenacity import ( - RetryError, - retry, - wait_exponential, - stop_after_attempt, - retry_if_exception_type, -) -from aioshutil import copy -from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, makedirs -from aiofiles.os import remove as aioremove -from aiofiles.os import rename as aiorename -from pyrogram.types import InputMediaVideo, InputMediaDocument -from pyrogram.errors import ( - FloodWait, - PeerIdInvalid, - ChannelInvalid, - MessageNotModified, -) - -from bot import ( - IS_PREMIUM_USER, - GLOBAL_EXTENSION_FILTER, - bot, - user, - user_data, - config_dict, -) -from bot.helper.aeon_utils.metadata import add_attachment -from bot.helper.ext_utils.bot_utils import ( - is_mkv, - is_url, - sync_to_async, - is_telegram_link, - download_image_url, -) -from bot.helper.ext_utils.files_utils import ( - get_ss, - take_ss, - is_archive, - process_file, - get_base_name, - clean_unwanted, - get_media_info, - get_audio_thumb, - get_document_type, - get_mediainfo_link, -) -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - chat_info, - delete_message, - sendMultiMessage, - get_tg_link_content, -) - -LOGGER = getLogger(__name__) -getLogger("pyrogram").setLevel(ERROR) - - -class TgUploader: - def __init__(self, name=None, path=None, listener=None): - self.name = name - self.__last_uploaded = 0 - self.__processed_bytes = 0 - self.__listener = listener - self.__path = path - self.__start_time = time() - self.__total_files = 0 - self.__is_cancelled = False - self.__sent_msg = None - self.__has_buttons = False - self.__msgs_dict = {} - self.__corrupted = 0 - self.__is_corrupted = False - self.__media_dict = {"videos": {}, "documents": {}} - self.__last_msg_in_group = False - self.__prm_media = False - self.__client = bot - self.__up_path = "" - self.__ldump = "" - self.__mediainfo = False - self.__as_doc = False - self.__media_group = False - self.__bot_pm = False - self.__user_id = listener.message.from_user.id - self.__leechmsg = {} - self.__files_utils = self.__listener.files_utils - self.__thumb = f"Thumbnails/{listener.message.from_user.id}.jpg" - - async def get_custom_thumb(self, thumb): - if is_telegram_link(thumb): - try: - msg, client = await get_tg_link_content(thumb) - except Exception as e: - LOGGER.error(f"Thumb Access Error: {e}") - return None - if msg and not msg.photo: - LOGGER.error("Thumb TgLink Invalid: Provide Link to Photo Only !") - return None - _client = bot if client == "bot" else user - photo_dir = await _client.download_media(msg) - elif is_url(thumb): - photo_dir = await download_image_url(thumb) - else: - LOGGER.error("Custom Thumb Invalid") - return None - if await aiopath.exists(photo_dir): - path = "Thumbnails" - if not await aiopath.isdir(path): - await mkdir(path) - des_dir = 
ospath.join(path, f"{time()}.jpg") - await sync_to_async( - Image.open(photo_dir).convert("RGB").save, des_dir, "JPEG" - ) - await aioremove(photo_dir) - return des_dir - return None - - async def __buttons(self, up_path, is_video=False): - buttons = ButtonMaker() - try: - if is_video and bool(self.__files_utils["screenshots"]): - buttons.url( - "SCREENSHOTS", - await get_ss(up_path, self.__files_utils["screenshots"]), - ) - except Exception as e: - LOGGER.error(f"ScreenShots Error: {e}") - try: - if self.__mediainfo: - m = await get_mediainfo_link(up_path) - buttons.url("MediaInfo", m) - LOGGER.info(m) - except Exception as e: - LOGGER.error(f"MediaInfo Error: {e!s}") - return buttons.column(1) if self.__has_buttons else None - - async def __copy_file(self): - try: - if self.__bot_pm and (self.__leechmsg or self.__listener.isSuperGroup): - destination = "Bot PM" - copied = await bot.copy_message( - chat_id=self.__user_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - reply_to_message_id=self.__listener.botpmmsg.id - if self.__listener.botpmmsg - else None, - ) - if self.__has_buttons: - rply = self.__sent_msg.reply_markup - with contextlib.suppress(MessageNotModified): - await copied.edit_reply_markup(rply) - if len(self.__leechmsg) > 1: - for chat_id, msg in list(self.__leechmsg.items())[1:]: - destination = f"Leech Log: {chat_id}" - self.__leechmsg[chat_id] = await bot.copy_message( - chat_id=chat_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - reply_to_message_id=msg.id, - ) - if msg.text: - await delete_message(msg) - if self.__has_buttons: - with contextlib.suppress(MessageNotModified): - await self.__leechmsg[chat_id].edit_reply_markup( - self.__sent_msg.reply_markup - ) - - if self.__ldump: - destination = "User Dump" - for channel_id in self.__ldump.split(): - chat = await chat_info(channel_id) - try: - dump_copy = await bot.copy_message( - chat_id=chat.id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - if self.__has_buttons: - rply = self.__sent_msg.reply_markup - with contextlib.suppress(MessageNotModified): - await dump_copy.edit_reply_markup(rply) - except (ChannelInvalid, PeerIdInvalid) as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - continue - except Exception as err: - if not self.__is_cancelled: - LOGGER.error(f"Failed To Send in {destination}:\n{err!s}") - - async def __upload_progress(self, current, total): - if self.__is_cancelled: - if IS_PREMIUM_USER: - user.stop_transmission() - bot.stop_transmission() - chunk_size = current - self.__last_uploaded - self.__last_uploaded = current - self.__processed_bytes += chunk_size - - async def __user_settings(self): - user_dict = user_data.get(self.__user_id, {}) - self.__as_doc = user_dict.get("as_doc") or config_dict["AS_DOCUMENT"] - self.__media_group = ( - user_dict.get("media_group") or config_dict["MEDIA_GROUP"] - ) - self.__bot_pm = True - self.__mediainfo = config_dict["SHOW_MEDIAINFO"] or user_dict.get( - "mediainfo" - ) - self.__ldump = user_dict.get("ldump", "") or "" - self.__has_buttons = bool( - self.__mediainfo or self.__files_utils["screenshots"] - ) - if not await aiopath.exists(self.__thumb): - self.__thumb = None - - async def __msg_to_reply(self): - msg_user = self.__listener.message.from_user - if config_dict["LEECH_DUMP_ID"]: - try: - mention = msg_user.mention(style="HTML") - uid = msg_user.id - msg = f"Task started\n\n• User: {mention}\n• ID: {uid}" - self.__leechmsg = await sendMultiMessage( - config_dict["LEECH_DUMP_ID"], msg - ) - except Exception as er: - await self.__listener.onUploadError(str(er)) - return False - self.__sent_msg = next(iter(self.__leechmsg.values())) - elif IS_PREMIUM_USER: - if not self.__listener.isSuperGroup: - await self.__listener.onUploadError( - "Use SuperGroup to leech with User Client! or Set LEECH_DUMP_ID to Leech in PM" - ) - return False - self.__sent_msg = self.__listener.message - else: - self.__sent_msg = self.__listener.message - return True - - async def __prepare_file(self, prefile_, dirpath): - file_, cap_mono = await process_file(prefile_, self.__user_id, dirpath) - if (atc := self.__listener.attachment) and is_mkv(prefile_): - file_ = await add_attachment(prefile_, dirpath, atc) - if prefile_ != file_: - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join(dirpath, file_) - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = ospath.join(dirpath, file_) - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - if len(file_) > 64: - if is_archive(file_): - name = get_base_name(file_) - ext = file_.split(name, 1)[1] - elif match := re_match(r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+)", file_): - name = match.group(0) - ext = file_.split(name, 1)[1] - elif len(fsplit := ospath.splitext(file_)) > 1: - name = fsplit[0] - ext = fsplit[1] - else: - name = file_ - ext = "" - extn = len(ext) - remain = 64 - extn - name = name[:remain] - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join(dirpath, f"{name}{ext}") - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = ospath.join(dirpath, f"{name}{ext}") - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - return cap_mono, file_ - - def __get_input_media(self, subkey, key): - rlist = [] - for msg in self.__media_dict[key][subkey]: - if key == "videos": - input_media = InputMediaVideo( - media=msg.video.file_id, caption=msg.caption - ) - else: - input_media = InputMediaDocument( - media=msg.document.file_id, caption=msg.caption - ) - rlist.append(input_media) - return rlist - - async def __switching_client(self): - LOGGER.info( - f'Uploading Media {">" if self.__prm_media else "<"} 2GB by {"User" if self.__prm_media else "Bot"} Client' - ) - self.__client = user if (self.__prm_media and IS_PREMIUM_USER) else bot - - async def __send_media_group(self, subkey, key, msgs): - msgs_list = await msgs[0].reply_to_message.reply_media_group( - media=self.__get_input_media(subkey, key), - quote=True, - disable_notification=True, - ) - for msg in msgs: - if msg.link in self.__msgs_dict: - del self.__msgs_dict[msg.link] - await delete_message(msg) - del self.__media_dict[key][subkey] - if self.__listener.isSuperGroup or config_dict["LEECH_DUMP_ID"]: - for m in msgs_list: - self.__msgs_dict[m.link] = m.caption - self.__sent_msg = msgs_list[-1] - try: - if self.__bot_pm and (self.__leechmsg or self.__listener.isSuperGroup): - destination = "Bot PM" - await bot.copy_media_group( - chat_id=self.__user_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - if self.__ldump: - destination = "Dump" - for channel_id in self.__ldump.split(): - dump_chat = await
chat_info(channel_id) - try: - await bot.copy_media_group( - chat_id=dump_chat.id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - except (ChannelInvalid, PeerIdInvalid) as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - continue - except Exception as err: - if not self.__is_cancelled: - LOGGER.error(f"Failed To Send in {destination}:\n{err!s}") - - async def upload(self, o_files, m_size, size): - await self.__user_settings() - res = await self.__msg_to_reply() - if not res: - return - isDeleted = False - for dirpath, _, files in sorted(await sync_to_async(walk, self.__path)): - if dirpath.endswith("/yt-dlp-thumb"): - continue - for file_ in natsorted(files): - self.__up_path = ospath.join(dirpath, file_) - if file_.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - await aioremove(self.__up_path) - continue - try: - f_size = await aiopath.getsize(self.__up_path) - if ( - self.__listener.seed - and file_ in o_files - and f_size in m_size - ): - continue - self.__total_files += 1 - if f_size == 0: - LOGGER.error( - f"{self.__up_path} size is zero, telegram don't upload zero size files" - ) - self.__corrupted += 1 - continue - if self.__is_cancelled: - return - self.__prm_media = f_size > 2097152000 - cap_mono, file_ = await self.__prepare_file(file_, dirpath) - if self.__last_msg_in_group: - group_lists = [ - x for v in self.__media_dict.values() for x in v - ] - if ( - match := re_match( - r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+)", self.__up_path - ) - ) and match.group(0) not in group_lists: - for key, value in list(self.__media_dict.items()): - for subkey, msgs in list(value.items()): - if len(msgs) > 1: - await self.__send_media_group( - subkey, key, msgs - ) - self.__last_msg_in_group = False - self.__last_uploaded = 0 - await self.__switching_client() - await self.__upload_file(cap_mono, file_) - if not isDeleted: - values_list = list(self.__leechmsg.values()) - if values_list: - await delete_message(values_list[0]) - isDeleted = True - if self.__is_cancelled: - return - if not self.__is_corrupted and ( - self.__listener.isSuperGroup or config_dict["LEECH_DUMP_ID"] - ): - self.__msgs_dict[self.__sent_msg.link] = file_ - await sleep(1) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info( - f"Total Attempts: {err.last_attempt.attempt_number}" - ) - else: - LOGGER.error(f"{format_exc()}. Path: {self.__up_path}") - if self.__is_cancelled: - return - continue - finally: - if ( - not self.__is_cancelled - and await aiopath.exists(self.__up_path) - and ( - not self.__listener.seed - or self.__listener.newDir - or dirpath.endswith("/splited_files") - or "/copied/" in self.__up_path - ) - ): - await aioremove(self.__up_path) - for key, value in list(self.__media_dict.items()): - for subkey, msgs in list(value.items()): - if len(msgs) > 1: - await self.__send_media_group(subkey, key, msgs) - if self.__is_cancelled: - return - if self.__listener.seed and not self.__listener.newDir: - await clean_unwanted(self.__path) - if self.__total_files == 0: - await self.__listener.onUploadError( - "No files to upload. In case you have filled EXTENSION_FILTER, then check if all files have those extensions or not." - ) - return - if self.__total_files <= self.__corrupted: - await self.__listener.onUploadError( - "Files Corrupted or unable to upload. Check logs!" 
- ) - return - LOGGER.info(f"Leech Completed: {self.name}") - await self.__listener.onUploadComplete( - None, - size, - self.__msgs_dict, - self.__total_files, - self.__corrupted, - self.name, - ) - - @retry( - wait=wait_exponential(multiplier=2, min=4, max=8), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - async def __upload_file(self, cap_mono, file, force_document=False): - if self.__thumb is not None and not await aiopath.exists(self.__thumb): - self.__thumb = None - thumb = self.__thumb - self.__is_corrupted = False - try: - is_video, is_audio, is_image = await get_document_type(self.__up_path) - - if self.__files_utils["thumb"]: - thumb = await self.get_custom_thumb(self.__files_utils["thumb"]) - if not is_image and thumb is None: - file_name = ospath.splitext(file)[0] - thumb_path = f"{self.__path}/yt-dlp-thumb/{file_name}.jpg" - if await aiopath.isfile(thumb_path): - thumb = thumb_path - elif is_audio and not is_video: - thumb = await get_audio_thumb(self.__up_path) - - if ( - self.__as_doc - or force_document - or (not is_video and not is_audio and not is_image) - ): - key = "documents" - if is_video and thumb is None: - thumb = await take_ss(self.__up_path, None) - if self.__is_cancelled: - return None - buttons = await self.__buttons(self.__up_path, is_video) - nrml_media = await self.__client.send_document( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - document=self.__up_path, - thumb=thumb, - caption=cap_mono, - force_document=True, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=buttons, - ) - - if self.__prm_media and (self.__has_buttons or not self.__leechmsg): - try: - self.__sent_msg = await bot.copy_message( - nrml_media.chat.id, - nrml_media.chat.id, - nrml_media.id, - reply_to_message_id=self.__sent_msg.id, - reply_markup=buttons, - ) - if self.__sent_msg: - await delete_message(nrml_media) - except Exception: - self.__sent_msg = nrml_media - else: - self.__sent_msg = nrml_media - elif is_video: - key = "videos" - duration = (await get_media_info(self.__up_path))[0] - if thumb is None: - thumb = await take_ss(self.__up_path, duration) - if thumb is not None: - with Image.open(thumb) as img: - width, height = img.size - else: - width = 480 - height = 320 - if not self.__up_path.upper().endswith(("MKV", "MP4")): - dirpath, file_ = self.__up_path.rsplit("/", 1) - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join( - dirpath, f"{ospath.splitext(file_)[0]}.mkv" - ) - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = f"{ospath.splitext(self.__up_path)[0]}.mkv" - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - if self.__is_cancelled: - return None - buttons = await self.__buttons(self.__up_path, is_video) - nrml_media = await self.__client.send_video( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - video=self.__up_path, - caption=cap_mono, - duration=duration, - width=width, - height=height, - thumb=thumb, - supports_streaming=True, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=buttons, - ) - if self.__prm_media and (self.__has_buttons or not self.__leechmsg): - try: - self.__sent_msg = await bot.copy_message( - nrml_media.chat.id, - nrml_media.chat.id, - nrml_media.id, - reply_to_message_id=self.__sent_msg.id, - 
reply_markup=buttons, - ) - if self.__sent_msg: - await delete_message(nrml_media) - except Exception: - self.__sent_msg = nrml_media - else: - self.__sent_msg = nrml_media - elif is_audio: - key = "audios" - duration, artist, title = await get_media_info(self.__up_path) - if self.__is_cancelled: - return None - self.__sent_msg = await self.__client.send_audio( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - audio=self.__up_path, - caption=cap_mono, - duration=duration, - performer=artist, - title=title, - thumb=thumb, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path), - ) - else: - key = "photos" - if self.__is_cancelled: - return None - self.__sent_msg = await self.__client.send_photo( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - photo=self.__up_path, - caption=cap_mono, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path), - ) - - if ( - not self.__is_cancelled - and self.__media_group - and (self.__sent_msg.video or self.__sent_msg.document) - ): - key = "documents" if self.__sent_msg.document else "videos" - if match := re_match( - r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+)", self.__up_path - ): - pname = match.group(0) - if pname in self.__media_dict[key]: - self.__media_dict[key][pname].append(self.__sent_msg) - else: - self.__media_dict[key][pname] = [self.__sent_msg] - msgs = self.__media_dict[key][pname] - if len(msgs) == 10: - await self.__send_media_group(pname, key, msgs) - else: - self.__last_msg_in_group = True - await self.__copy_file() - - if ( - self.__thumb is None - and thumb is not None - and await aiopath.exists(thumb) - ): - await aioremove(thumb) - except FloodWait as f: - LOGGER.warning(str(f)) - await sleep(f.value) - except Exception as err: - if ( - self.__thumb is None - and thumb is not None - and await aiopath.exists(thumb) - ): - await aioremove(thumb) - LOGGER.error(f"{format_exc()}. Path: {self.__up_path}") - if "Telegram says: [400" in str(err) and key != "documents": - LOGGER.error(f"Retrying As Document. 
Path: {self.__up_path}")
-                return await self.__upload_file(cap_mono, file, True)
-            raise err
-
-    @property
-    def speed(self):
-        try:
-            return self.__processed_bytes / (time() - self.__start_time)
-        except Exception:
-            return 0
-
-    @property
-    def processed_bytes(self):
-        return self.__processed_bytes
-
-    async def cancel_download(self):
-        self.__is_cancelled = True
-        LOGGER.info(f"Cancelling Upload: {self.name}")
-        await self.__listener.onUploadError("Cancelled by user!")
diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py
index 913b83790..a9c858159 100644
--- a/bot/helper/telegram_helper/bot_commands.py
+++ b/bot/helper/telegram_helper/bot_commands.py
@@ -1,40 +1,39 @@
-from bot import CMD_SUFFIX as i
+from bot import config_dict
+
+i = config_dict["CMD_SUFFIX"]


 class _BotCommands:
     def __init__(self):
-        self.StartCommand = "start"
-        self.MirrorCommand = [f"mirror{i}", f"m{i}"]
-        self.YtdlCommand = [f"ytdl{i}", f"y{i}"]
-        self.LeechCommand = [f"leech{i}", f"l{i}"]
-        self.YtdlLeechCommand = [f"ytdlleech{i}", f"yl{i}"]
-        self.CloneCommand = [f"clone{i}", f"c{i}"]
-        self.CountCommand = f"count{i}"
-        self.DeleteCommand = f"del{i}"
-        self.StopAllCommand = [f"stopall{i}", "stopallbot"]
-        self.ListCommand = f"list{i}"
-        self.SearchCommand = f"search{i}"
-        self.StatusCommand = [f"status{i}", "statusall"]
-        self.UsersCommand = f"users{i}"
-        self.AuthorizeCommand = f"authorize{i}"
-        self.UnAuthorizeCommand = f"unauthorize{i}"
-        self.AddSudoCommand = f"addsudo{i}"
-        self.RmSudoCommand = f"rmsudo{i}"
-        self.PingCommand = "ping"
-        self.RestartCommand = [f"restart{i}", "restartall"]
-        self.StatsCommand = [f"stats{i}", "statsall"]
-        self.HelpCommand = f"help{i}"
-        self.LogCommand = f"log{i}"
-        self.ShellCommand = f"shell{i}"
-        self.EvalCommand = f"eval{i}"
-        self.ExecCommand = f"exec{i}"
-        self.BotSetCommand = f"botsettings{i}"
-        self.UserSetCommand = f"settings{i}"
-        self.SpeedCommand = f"speedtest{i}"
-        self.AddImageCommand = f"addimg{i}"
-        self.ImagesCommand = f"images{i}"
-        self.MediaInfoCommand = f"mediainfo{i}"
-        self.BroadcastCommand = [f"broadcast{i}", "broadcastall"]
+        self.StartCommand: str = "start"
+        self.MirrorCommand: list[str] = [f"mirror{i}", f"m{i}"]
+        self.YtdlCommand: list[str] = [f"ytdl{i}", f"y{i}"]
+        self.LeechCommand: list[str] = [f"leech{i}", f"l{i}"]
+        self.YtdlLeechCommand: list[str] = [f"ytdlleech{i}", f"yl{i}"]
+        self.CloneCommand: list[str] = [f"clone{i}", f"c{i}"]
+        self.CountCommand: str = f"count{i}"
+        self.DeleteCommand: str = f"del{i}"
+        self.CancelAllCommand: str = f"stopall{i}"
+        self.ListCommand: str = f"list{i}"
+        self.SearchCommand: str = f"search{i}"
+        self.StatusCommand: list[str] = [f"status{i}", "statusall"]
+        self.UsersCommand: str = f"users{i}"
+        self.AuthorizeCommand: str = f"authorize{i}"
+        self.UnAuthorizeCommand: str = f"unauthorize{i}"
+        self.AddSudoCommand: str = f"addsudo{i}"
+        self.RmSudoCommand: str = f"rmsudo{i}"
+        self.PingCommand: str = f"ping{i}"
+        self.RestartCommand: list[str] = [f"restart{i}", "restartall"]
+        self.StatsCommand: list[str] = [f"stats{i}", "statsall"]
+        self.HelpCommand: str = f"help{i}"
+        self.LogCommand: str = f"log{i}"
+        self.ShellCommand: str = f"shell{i}"
+        self.AExecCommand: str = f"aexec{i}"
+        self.ExecCommand: str = f"exec{i}"
+        self.BotSetCommand: str = f"botsettings{i}"
+        self.UserSetCommand: str = f"settings{i}"
+        self.MediaInfoCommand: str = f"mediainfo{i}"
+        self.BroadcastCommand: list[str] = [f"broadcast{i}", "broadcastall"]


 BotCommands = _BotCommands()
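Note: the `_BotCommands` table above is consumed by pyrogram's `filters.command`, which accepts either a single string or a list of aliases, so the suffixed lists (e.g. `[f"mirror{i}", f"m{i}"]`) can be passed through unchanged. A minimal wiring sketch under that assumption; the `start` handler body is a hypothetical stub, and only the import paths come from this patch:

from pyrogram import filters
from pyrogram.handlers import MessageHandler

from bot import bot
from bot.helper.telegram_helper.bot_commands import BotCommands


async def start(_, message):
    # Hypothetical stub; the real handlers live under bot/modules.
    await message.reply("Bot is alive")


# filters.command() takes a str or a list of str, so plain commands
# ("start") and suffixed alias lists (MirrorCommand) register the same way.
bot.add_handler(MessageHandler(start, filters.command(BotCommands.StartCommand)))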
diff --git a/bot/helper/telegram_helper/button_build.py b/bot/helper/telegram_helper/button_build.py
index 6b760c15f..e002fabb7 100644
--- a/bot/helper/telegram_helper/button_build.py
+++ b/bot/helper/telegram_helper/button_build.py
@@ -3,52 +3,55 @@

 class ButtonMaker:
     def __init__(self):
-        self.main_buttons = []
-        self.header_buttons = []
-        self.footer_buttons = []
-
-    def url(self, text, url, position=None):
-        button = InlineKeyboardButton(text=text, url=url)
-        if position == "header":
-            self.header_buttons.append(button)
+        self._button = []
+        self._header_button = []
+        self._footer_button = []
+
+    def url(self, key, link, position=None):
+        if not position:
+            self._button.append(InlineKeyboardButton(text=key, url=link))
+        elif position == "header":
+            self._header_button.append(InlineKeyboardButton(text=key, url=link))
         elif position == "footer":
-            self.footer_buttons.append(button)
-        else:
-            self.main_buttons.append(button)
-
-    def callback(self, text, callback_data, position=None):
-        button = InlineKeyboardButton(text=text, callback_data=callback_data)
-        if position == "header":
-            self.header_buttons.append(button)
+            self._footer_button.append(InlineKeyboardButton(text=key, url=link))
+
+    def callback(self, key, data, position=None):
+        if not position:
+            self._button.append(InlineKeyboardButton(text=key, callback_data=data))
+        elif position == "header":
+            self._header_button.append(
+                InlineKeyboardButton(text=key, callback_data=data)
+            )
         elif position == "footer":
-            self.footer_buttons.append(button)
-        else:
-            self.main_buttons.append(button)
+            self._footer_button.append(
+                InlineKeyboardButton(text=key, callback_data=data)
+            )

-    def column(self, main_columns=1, header_columns=8, footer_columns=8):
-        keyboard = [
-            self.main_buttons[i : i + main_columns]
-            for i in range(0, len(self.main_buttons), main_columns)
+    def menu(self, b_cols=1, h_cols=8, f_cols=8):
+        menu = [
+            self._button[i : i + b_cols] for i in range(0, len(self._button), b_cols)
         ]
-
-        if self.header_buttons:
-            if len(self.header_buttons) > header_columns:
-                header_chunks = [
-                    self.header_buttons[i : i + header_columns]
-                    for i in range(0, len(self.header_buttons), header_columns)
+        if self._header_button:
+            h_cnt = len(self._header_button)
+            if h_cnt > h_cols:
+                header_buttons = [
+                    self._header_button[i : i + h_cols]
+                    for i in range(0, len(self._header_button), h_cols)
                 ]
-                keyboard = header_chunks + keyboard
+                menu = header_buttons + menu
             else:
-                keyboard.insert(0, self.header_buttons)
-
-        if self.footer_buttons:
-            if len(self.footer_buttons) > footer_columns:
-                footer_chunks = [
-                    self.footer_buttons[i : i + footer_columns]
-                    for i in range(0, len(self.footer_buttons), footer_columns)
+                menu.insert(0, self._header_button)
+        if self._footer_button:
+            if len(self._footer_button) > f_cols:
+                menu += [
+                    self._footer_button[i : i + f_cols]
+                    for i in range(0, len(self._footer_button), f_cols)
                 ]
-                keyboard += footer_chunks
             else:
-                keyboard.append(self.footer_buttons)
+                menu.append(self._footer_button)
+        return InlineKeyboardMarkup(menu)

-        return InlineKeyboardMarkup(keyboard)
+    def reset(self):
+        self._button = []
+        self._header_button = []
+        self._footer_button = []
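Note: `menu()` above chunks the main buttons into rows of `b_cols` and stacks header/footer rows around them, chunking those too once they exceed `h_cols`/`f_cols`. A short usage sketch, assuming only the `ButtonMaker` class from this file; the callback-data strings mirror the ones used later in this patch's bot_settings.py:

from bot.helper.telegram_helper.button_build import ButtonMaker

buttons = ButtonMaker()
buttons.callback("View value", "botset showvar FOO", position="header")
buttons.callback("Edit Value", "botset editvar FOO edit")
buttons.callback("Reset", "botset resetvar FOO")
buttons.callback("Close", "botset close", position="footer")
# Two columns for the main buttons; header and footer each stay on their
# own row here because they hold fewer than h_cols/f_cols buttons.
markup = buttons.menu(2)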
diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py
index ca41e25fb..a5a5a8d0b 100644
--- a/bot/helper/telegram_helper/filters.py
+++ b/bot/helper/telegram_helper/filters.py
@@ -2,22 +2,23 @@
 from pyrogram.filters import create

 from bot import OWNER_ID, user_data
-from bot.helper.telegram_helper.message_utils import chat_info
+from bot.helper.aeon_utils.access_check import get_chat_info


 class CustomFilters:
-    async def owner_filter(self, _, message):
+    @staticmethod
+    async def owner_filter(_, message):
         user = message.from_user or message.sender_chat
-        uid = user.id
-        return uid == OWNER_ID
+        return user.id == OWNER_ID

     owner = create(owner_filter)

-    async def authorized_user(self, _, message):
+    @staticmethod
+    async def authorized_user(_, message):
         user = message.from_user or message.sender_chat
         uid = user.id
         chat_id = message.chat.id
-        return bool(
+        return (
             uid == OWNER_ID
             or (
                 uid in user_data
@@ -31,10 +32,12 @@ async def authorized_user(self, _, message):

     authorized = create(authorized_user)

-    async def authorized_usetting(self, _, message):
-        uid = (message.from_user or message.sender_chat).id
+    @staticmethod
+    async def authorized_usetting(_, message):
+        user = message.from_user or message.sender_chat
+        uid = user.id
         chat_id = message.chat.id
-        isExists = False
+
         if (
             uid == OWNER_ID
             or (
@@ -46,29 +49,29 @@ async def authorized_usetting(self, _, message):
             )
             or (chat_id in user_data and user_data[chat_id].get("is_auth", False))
         ):
-            isExists = True
-        elif message.chat.type == ChatType.PRIVATE:
-            for channel_id in user_data:
-                if not (
-                    user_data[channel_id].get("is_auth")
-                    and str(channel_id).startswith("-100")
-                ):
-                    continue
-                try:
-                    if await (await chat_info(str(channel_id))).get_member(uid):
-                        isExists = True
-                        break
-                except Exception:
-                    continue
-        return isExists
+            return True
+
+        if message.chat.type == ChatType.PRIVATE:
+            for channel_id, data in user_data.items():
+                if data.get("is_auth") and str(channel_id).startswith("-100"):
+                    try:
+                        if await (await get_chat_info(str(channel_id))).get_member(
+                            uid
+                        ):
+                            return True
+                    except Exception:
+                        continue
+
+        return False

     authorized_uset = create(authorized_usetting)

-    async def sudo_user(self, _, message):
+    @staticmethod
+    async def sudo_user(_, message):
         user = message.from_user or message.sender_chat
         uid = user.id
-        return bool(
-            uid == OWNER_ID or uid in user_data and user_data[uid].get("is_sudo")
+        return uid == OWNER_ID or (
+            uid in user_data and user_data[uid].get("is_sudo", False)
         )

     sudo = create(sudo_user)
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index 9576765d3..6efe34496 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -1,209 +1,98 @@
 from re import match as re_match
 from time import time
-from random import choice
 from asyncio import sleep
-from traceback import format_exc

-from aiofiles.os import remove as aioremove
+from pyrogram import Client, enums
+from cachetools import TTLCache
 from pyrogram.types import InputMediaPhoto
-from pyrogram.errors import (
-    RPCError,
-    FloodWait,
-    MediaEmpty,
-    MessageEmpty,
-    PeerIdInvalid,
-    WebpageCurlFailed,
-    MessageNotModified,
-    ReplyMarkupInvalid,
-    UserNotParticipant,
-    PhotoInvalidDimensions,
-)
+from pyrogram.errors import FloodWait, MessageEmpty, MessageNotModified

 from bot import (
-    IMAGES,
     LOGGER,
-    DELETE_LINKS,
-    Interval,
+    TELEGRAM_API,
+    TELEGRAM_HASH,
+    Intervals,
     bot,
     user,
-    status_reply_dict,
-    download_dict_lock,
-    status_reply_dict_lock,
-)
-from bot.helper.ext_utils.bot_utils import (
-    SetInterval,
-    sync_to_async,
-    download_image_url,
-    get_readable_message,
+    user_data,
+    status_dict,
+    task_dict_lock,
 )
-from bot.helper.ext_utils.exceptions import TgLinkError
-from bot.helper.telegram_helper.button_build import ButtonMaker
+from
bot.helper.ext_utils.bot_utils import setInterval +from bot.helper.ext_utils.exceptions import TgLinkException +from bot.helper.ext_utils.status_utils import get_readable_message +session_cache = TTLCache(maxsize=1000, ttl=36000) -async def send_message(message, text, buttons=None, photo=None): + +async def send_message( + message, text, buttons=None, block=True, photo=None, MARKDOWN=False +): + parse_mode = enums.ParseMode.MARKDOWN if MARKDOWN else enums.ParseMode.HTML try: + if isinstance(message, int): + return await bot.send_message( + chat_id=message, + text=text, + disable_web_page_preview=True, + disable_notification=True, + reply_markup=buttons, + parse_mode=parse_mode, + ) if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - return await message.reply_photo( - photo=photo, - reply_to_message_id=message.id, - caption=text, - reply_markup=buttons, - disable_notification=True, - ) - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await send_message(message, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception: - LOGGER.error(format_exc()) + return await message.reply_photo( + photo=photo, + reply_to_message_id=message.id, + caption=text, + reply_markup=buttons, + disable_notification=True, + parse_mode=parse_mode, + ) return await message.reply( text=text, quote=True, disable_web_page_preview=True, disable_notification=True, reply_markup=buttons, + parse_mode=parse_mode, ) except FloodWait as f: LOGGER.warning(str(f)) - await sleep(f.value * 1.2) - return await send_message(message, text, buttons, photo) - except ReplyMarkupInvalid: - return await send_message(message, text, None, photo) + if block: + await sleep(f.value * 1.2) + return await send_message(message, text, buttons, block, photo, MARKDOWN) + return str(f) except Exception as e: - LOGGER.error(format_exc()) + LOGGER.error(str(e)) return str(e) -async def sendCustomMsg(chat_id, text, buttons=None, photo=None): +async def edit_message( + message, text, buttons=None, block=True, photo=None, MARKDOWN=False +): + parse_mode = enums.ParseMode.MARKDOWN if MARKDOWN else enums.ParseMode.HTML try: - if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - return await bot.send_photo( - chat_id=chat_id, - photo=photo, - caption=text, + if message.media: + if photo: + return await message.edit_media( + InputMediaPhoto(photo, text), reply_markup=buttons, - disable_notification=True, + parse_mode=parse_mode, ) - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await sendCustomMsg(chat_id, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception: - LOGGER.error(format_exc()) - return await bot.send_message( - chat_id=chat_id, + return await message.edit_caption( + caption=text, reply_markup=buttons, parse_mode=parse_mode + ) + await message.edit( text=text, disable_web_page_preview=True, - disable_notification=True, reply_markup=buttons, + parse_mode=parse_mode, ) except FloodWait as f: LOGGER.warning(str(f)) - await sleep(f.value * 1.2) - return await sendCustomMsg(chat_id, text, buttons, photo) - except ReplyMarkupInvalid: - return await sendCustomMsg(chat_id, text, None, photo) - except Exception as e: - LOGGER.error(format_exc()) - return str(e) - - -async def chat_info(channel_id): - if channel_id.startswith("-100"): - channel_id = int(channel_id) - elif 
channel_id.startswith("@"): - channel_id = channel_id.replace("@", "") - else: - return None - try: - return await bot.get_chat(channel_id) - except PeerIdInvalid as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - return None - - -async def isAdmin(message, user_id=None): - if message.chat.type == message.chat.type.PRIVATE: - return None - if user_id: - member = await message.chat.get_member(user_id) - else: - member = await message.chat.get_member(message.from_user.id) - return member.status in [member.status.ADMINISTRATOR, member.status.OWNER] - - -async def sendMultiMessage(chat_ids, text, buttons=None, photo=None): - msg_dict = {} - for channel_id in chat_ids.split(): - chat = await chat_info(channel_id) - try: - if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - sent = await bot.send_photo( - chat_id=chat.id, - photo=photo, - caption=text, - reply_markup=buttons, - disable_notification=True, - ) - msg_dict[chat.id] = sent - continue - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await sendMultiMessage(chat_ids, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception as e: - LOGGER.error(str(e)) - sent = await bot.send_message( - chat_id=chat.id, - text=text, - disable_web_page_preview=True, - disable_notification=True, - reply_markup=buttons, - ) - msg_dict[chat.id] = sent - except FloodWait as f: - LOGGER.warning(str(f)) + if block: await sleep(f.value * 1.2) - return await sendMultiMessage(chat_ids, text, buttons, photo) - except Exception as e: - LOGGER.error(str(e)) - return str(e) - return msg_dict - - -async def edit_message(message, text, buttons=None, photo=None): - try: - if message.media: - if photo: - return await message.edit_media( - InputMediaPhoto(photo, text), reply_markup=buttons - ) - return await message.edit_caption(caption=text, reply_markup=buttons) - await message.edit( - text=text, disable_web_page_preview=True, reply_markup=buttons - ) - except FloodWait as f: - LOGGER.warning(str(f)) - await sleep(f.value * 1.2) - return await edit_message(message, text, buttons, photo) + return await edit_message(message, text, buttons, block, photo, MARKDOWN) except (MessageNotModified, MessageEmpty): pass except Exception as e: @@ -211,7 +100,7 @@ async def edit_message(message, text, buttons=None, photo=None): return str(e) -async def sendFile(message, file, caption=None, buttons=None): +async def sendFile(message, file, caption="", buttons=None): try: return await message.reply_document( document=file, @@ -223,7 +112,7 @@ async def sendFile(message, file, caption=None, buttons=None): except FloodWait as f: LOGGER.warning(str(f)) await sleep(f.value * 1.2) - return await sendFile(message, file, caption) + return await sendFile(message, file, caption, buttons) except Exception as e: LOGGER.error(str(e)) return str(e) @@ -247,39 +136,89 @@ async def five_minute_del(message): async def delete_links(message): - if DELETE_LINKS: - if reply_to := message.reply_to_message: - await delete_message(reply_to) - await delete_message(message) + if reply_to := message.reply_to_message: + await delete_message(reply_to) + await delete_message(message) -async def delete_all_messages(): - async with status_reply_dict_lock: - try: - for key, data in list(status_reply_dict.items()): - del status_reply_dict[key] - await delete_message(data[0]) - except Exception as e: - LOGGER.error(str(e)) +async def auto_delete_message(cmd_message=None, 
bot_message=None): + await sleep(60) + if cmd_message is not None: + await delete_message(cmd_message) + if bot_message is not None: + await delete_message(bot_message) + + +async def delete_status(): + async with task_dict_lock: + for key, data in list(status_dict.items()): + try: + await delete_message(data["message"]) + del status_dict[key] + except Exception as e: + LOGGER.error(str(e)) -async def get_tg_link_content(link): +async def get_tg_link_message(link, user_id=""): message = None + links = [] + user_s = None + + if user_id: + if user_id in session_cache: + user_s = session_cache[user_id] + else: + user_dict = user_data.get(user_id, {}) + session_string = user_dict.get("session_string") + if session_string: + user_s = Client( + f"session_{user_id}", + TELEGRAM_API, + TELEGRAM_HASH, + session_string=session_string, + no_updates=True, + ) + await user_s.start() + session_cache[user_id] = user_s + else: + user_s = user + if link.startswith("https://t.me/"): private = False msg = re_match( - r"https:\/\/t\.me\/(?:c\/)?([^\/]+)(?:\/[^\/]+)?\/([0-9]+)", link + r"https:\/\/t\.me\/(?:c\/)?([^\/]+)(?:\/[^\/]+)?\/([0-9-]+)", link ) else: private = True msg = re_match( - r"tg:\/\/openmessage\?user_id=([0-9]+)&message_id=([0-9]+)", link + r"tg:\/\/openmessage\?user_id=([0-9]+)&message_id=([0-9-]+)", link ) if not user: - raise TgLinkError("USER_SESSION_STRING required for this private link!") + raise TgLinkException( + "USER_SESSION_STRING required for this private link!" + ) + + chat = msg[1] + msg_id = msg[2] + if "-" in msg_id: + start_id, end_id = map(int, msg_id.split("-")) + msg_id = start_id + btw = end_id - start_id + if private: + link = link.split("&message_id=")[0] + links.append(f"{link}&message_id={start_id}") + for _ in range(btw): + start_id += 1 + links.append(f"{link}&message_id={start_id}") + else: + link = link.rsplit("/", 1)[0] + links.append(f"{link}/{start_id}") + for _ in range(btw): + start_id += 1 + links.append(f"{link}/{start_id}") + else: + msg_id = int(msg_id) - chat = msg.group(1) - msg_id = int(msg.group(2)) if chat.isdigit(): chat = int(chat) if private else int(f"-100{chat}") @@ -290,113 +229,115 @@ async def get_tg_link_content(link): private = True except Exception as e: private = True - if not user: + if not user_s: raise e - if private and user: + if not private: + return (links, bot) if links else (message, bot) + if user_s: try: - user_message = await user.get_messages(chat_id=chat, message_ids=msg_id) + user_message = await user_s.get_messages( + chat_id=chat, message_ids=msg_id + ) except Exception as e: - raise TgLinkError( - f"You don't have access to this chat!. 
ERROR: {e}" - ) from e + raise TgLinkException("We don't have access to this chat!") from e if not user_message.empty: - return user_message, "user" - raise TgLinkError("Private: Please report!") - if not private: - return message, "bot" - raise TgLinkError("Bot can't download from GROUPS without joining!") + return (links, user_s) if links else (user_message, user_s) + return None + raise TgLinkException("Private: Please report!") -async def update_all_messages(force=False): - async with status_reply_dict_lock: - if ( - not status_reply_dict - or not Interval - or (not force and time() - next(iter(status_reply_dict.values()))[1] < 3) - ): - return - for chat_id in list(status_reply_dict.keys()): - status_reply_dict[chat_id][1] = time() - async with download_dict_lock: - msg, buttons = await sync_to_async(get_readable_message) - if msg is None: +async def update_status_message(sid, force=False): + if Intervals["stopAll"]: return - async with status_reply_dict_lock: - for chat_id in list(status_reply_dict.keys()): - if ( - status_reply_dict[chat_id] - and msg != status_reply_dict[chat_id][0].text - ): - rmsg = await edit_message( - status_reply_dict[chat_id][0], msg, buttons - ) - if isinstance(rmsg, str) and rmsg.startswith("Telegram says: [400"): - del status_reply_dict[chat_id] - continue - status_reply_dict[chat_id][0].text = msg - status_reply_dict[chat_id][1] = time() + async with task_dict_lock: + if not status_dict.get(sid): + if obj := Intervals["status"].get(sid): + obj.cancel() + del Intervals["status"][sid] + return + if not force and time() - status_dict[sid]["time"] < 3: + return + status_dict[sid]["time"] = time() + page_no = status_dict[sid]["page_no"] + status = status_dict[sid]["status"] + is_user = status_dict[sid]["is_user"] + page_step = status_dict[sid]["page_step"] + text, buttons = await get_readable_message( + sid, is_user, page_no, status, page_step + ) + if text is None: + del status_dict[sid] + if obj := Intervals["status"].get(sid): + obj.cancel() + del Intervals["status"][sid] + return + if text != status_dict[sid]["message"].text: + message = await edit_message( + status_dict[sid]["message"], text, buttons, block=False + ) + if isinstance(message, str): + if message.startswith("Telegram says: [400"): + del status_dict[sid] + if obj := Intervals["status"].get(sid): + obj.cancel() + del Intervals["status"][sid] + else: + LOGGER.error( + f"Status with id: {sid} haven't been updated. 
Error: {message}" + ) + return + status_dict[sid]["message"].text = text + status_dict[sid]["time"] = time() -async def sendStatusMessage(msg): - async with download_dict_lock: - progress, buttons = await sync_to_async(get_readable_message) - if progress is None: +async def sendStatusMessage(msg, user_id=0): + if Intervals["stopAll"]: return - async with status_reply_dict_lock: - chat_id = msg.chat.id - if chat_id in list(status_reply_dict.keys()): - message = status_reply_dict[chat_id][0] + async with task_dict_lock: + sid = user_id or msg.chat.id + is_user = bool(user_id) + if sid in list(status_dict.keys()): + page_no = status_dict[sid]["page_no"] + status = status_dict[sid]["status"] + page_step = status_dict[sid]["page_step"] + text, buttons = await get_readable_message( + sid, is_user, page_no, status, page_step + ) + if text is None: + del status_dict[sid] + if obj := Intervals["status"].get(sid): + obj.cancel() + del Intervals["status"][sid] + return + message = status_dict[sid]["message"] await delete_message(message) - del status_reply_dict[chat_id] - message = await send_message(msg, progress, buttons) - message.text = progress - status_reply_dict[chat_id] = [message, time()] - if not Interval: - Interval.append(SetInterval(1, update_all_messages)) - - -async def forcesub(message, ids, button=None): - join_button = {} - _msg = "" - for channel_id in ids.split(): - chat = await chat_info(channel_id) - try: - await chat.get_member(message.from_user.id) - except UserNotParticipant: - if username := chat.username: - invite_link = f"https://t.me/{username}" - else: - invite_link = chat.invite_link - join_button[chat.title] = invite_link - except RPCError as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - except Exception as e: - LOGGER.error(f"{e} for {channel_id}") - if join_button: - if button is None: - button = ButtonMaker() - _msg = "You haven't joined our channel/group yet!" - for key, value in join_button.items(): - button.url(f"Join {key}", value, "footer") - return _msg, button - - -async def user_info(client, userId): - return await client.get_users(userId) - - -async def BotPm_check(message, button=None): - user_id = message.from_user.id - try: - temp_msg = await message._client.send_message( - chat_id=message.from_user.id, text="Checking Access..." - ) - await temp_msg.delete() - return None, button - except Exception: - if button is None: - button = ButtonMaker() - _msg = "You haven't initiated the bot in a private message!" - button.callback("Start", f"aeon {user_id} private", "header") - return _msg, button + message = await send_message(msg, text, buttons, block=False) + if isinstance(message, str): + LOGGER.error( + f"Status with id: {sid} haven't been sent. Error: {message}" + ) + return + message.text = text + status_dict[sid].update({"message": message, "time": time()}) + else: + text, buttons = await get_readable_message(sid, is_user) + if text is None: + return + message = await send_message(msg, text, buttons, block=False) + if isinstance(message, str): + LOGGER.error( + f"Status with id: {sid} haven't been sent. 
Error: {message}"
+            )
+            return
+        message.text = text
+        status_dict[sid] = {
+            "message": message,
+            "time": time(),
+            "page_no": 1,
+            "page_step": 1,
+            "status": "All",
+            "is_user": is_user,
+        }
+        if not Intervals["status"].get(sid) and not is_user:
+            Intervals["status"][sid] = setInterval(1, update_status_message, sid)
diff --git a/bot/modules/__init__.py b/bot/modules/__init__.py
index e69de29bb..d3f5a12fa 100644
--- a/bot/modules/__init__.py
+++ b/bot/modules/__init__.py
@@ -0,0 +1 @@
+
diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py
index ca9c02ea4..20e17fb1e 100644
--- a/bot/modules/authorize.py
+++ b/bot/modules/authorize.py
@@ -1,87 +1,89 @@
 from pyrogram.filters import command
 from pyrogram.handlers import MessageHandler

-from bot import DATABASE_URL, bot, user_data
+from bot import bot, user_data
 from bot.helper.ext_utils.bot_utils import update_user_ldata
-from bot.helper.ext_utils.db_handler import DbManager
+from bot.helper.ext_utils.db_handler import Database
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.message_utils import send_message


-async def change_authorization(message, is_authorize):
+async def authorize(client, message):
     msg = message.text.split()
     if len(msg) > 1:
         id_ = int(msg[1].strip())
     elif reply_to := message.reply_to_message:
-        id_ = reply_to.from_user.id
+        id_ = (
+            reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id
+        )
     else:
         id_ = message.chat.id
-    if is_authorize:
-        success_message = "Authorized"
-        if id_ in user_data and user_data[id_].get("is_auth"):
-            success_message = "Already authorized!"
-        else:
-            update_user_ldata(id_, "is_auth", True)
-            if DATABASE_URL:
-                await DbManager().update_user_data(id_)
+    if id_ in user_data and user_data[id_].get("is_auth"):
+        msg = "Already Authorized!"
     else:
-        success_message = "Unauthorized"
-        if id_ not in user_data or user_data[id_].get("is_auth"):
-            update_user_ldata(id_, "is_auth", False)
-            if DATABASE_URL:
-                await DbManager().update_user_data(id_)
-        else:
-            success_message = "Already unauthorized!"
-    await send_message(message, success_message)
+        update_user_ldata(id_, "is_auth", True)
+        await Database().update_user_data(id_)
+        msg = "Authorized"
+    await send_message(message, msg)


-async def change_sudo(message, is_sudo):
-    id_ = ""
+async def unauthorize(client, message):
     msg = message.text.split()
     if len(msg) > 1:
         id_ = int(msg[1].strip())
     elif reply_to := message.reply_to_message:
-        id_ = reply_to.from_user.id
-    if is_sudo:
-        if id_:
-            if id_ in user_data and user_data[id_].get("is_sudo"):
-                success_message = "Already Sudo!"
-            else:
-                update_user_ldata(id_, "is_sudo", True)
-                if DATABASE_URL:
-                    await DbManager().update_user_data(id_)
-                success_message = "Promoted as Sudo"
-        else:
-            success_message = (
-                "Give ID or Reply To message of whom you want to Promote."
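Note: `sendStatusMessage` above wires a per-chat refresh loop via `setInterval(1, update_status_message, sid)`, and the status code later cancels it with `obj.cancel()`. The helper itself is imported from `bot.helper.ext_utils.bot_utils` and is not part of this patch; a plausible asyncio sketch with the same call shape (the implementation details here are assumptions, not the repo's actual code):

from asyncio import create_task, sleep


class setInterval:
    # Repeatedly awaits action(*args) every `interval` seconds until
    # cancel() is called; must be constructed inside a running event loop.
    def __init__(self, interval, action, *args):
        self.interval = interval
        self.action = action
        self.task = create_task(self._loop(*args))

    async def _loop(self, *args):
        while True:
            await sleep(self.interval)
            await self.action(*args)

    def cancel(self):
        self.task.cancel()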
-            )
-    elif id_ and id_ in user_data and user_data[id_].get("is_sudo"):
-        update_user_ldata(id_, "is_sudo", False)
-        if DATABASE_URL:
-            await DbManager().update_user_data(id_)
-        success_message = "Demoted"
-    else:
-        success_message = (
-            "Give ID or Reply To message of whom you want to remove from Sudo"
+        id_ = (
+            reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id
         )
-    await send_message(message, success_message)
-
-
-async def authorize(_, message):
-    await change_authorization(message, True)
-
-
-async def unauthorize(_, message):
-    await change_authorization(message, False)
+    else:
+        id_ = message.chat.id
+    if id_ not in user_data or user_data[id_].get("is_auth"):
+        update_user_ldata(id_, "is_auth", False)
+        await Database().update_user_data(id_)
+        msg = "Unauthorized"
+    else:
+        msg = "Already Unauthorized!"
+    await send_message(message, msg)

-async def addSudo(_, message):
-    await change_sudo(message, True)
+async def addSudo(client, message):
+    id_ = ""
+    msg = message.text.split()
+    if len(msg) > 1:
+        id_ = int(msg[1].strip())
+    elif reply_to := message.reply_to_message:
+        id_ = (
+            reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id
+        )
+    if id_:
+        if id_ in user_data and user_data[id_].get("is_sudo"):
+            msg = "Already Sudo!"
+        else:
+            update_user_ldata(id_, "is_sudo", True)
+            await Database().update_user_data(id_)
+            msg = "Promoted as Sudo"
+    else:
+        msg = "Give ID or Reply To message of whom you want to Promote."
+    await send_message(message, msg)

-async def removeSudo(_, message):
-    await change_sudo(message, False)
+async def removeSudo(client, message):
+    id_ = ""
+    msg = message.text.split()
+    if len(msg) > 1:
+        id_ = int(msg[1].strip())
+    elif reply_to := message.reply_to_message:
+        id_ = (
+            reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id
+        )
+    if id_ and (id_ not in user_data or user_data[id_].get("is_sudo")):
+        update_user_ldata(id_, "is_sudo", False)
+        await Database().update_user_data(id_)
+        msg = "Demoted"
+    else:
+        msg = "Give ID or Reply To message of whom you want to remove from Sudo"
+    await send_message(message, msg)


 bot.add_handler(
diff --git a/bot/modules/bot_settings.py b/bot/modules/bot_settings.py
index c355f54f8..2840c132a 100644
--- a/bot/modules/bot_settings.py
+++ b/bot/modules/bot_settings.py
@@ -1,13 +1,18 @@
 from io import BytesIO
 from os import getcwd, environ
 from time import time
-from asyncio import sleep, create_subprocess_exec, create_subprocess_shell
+from asyncio import (
+    sleep,
+    gather,
+    create_subprocess_exec,
+    create_subprocess_shell,
+)
 from functools import partial
 from collections import OrderedDict

 from dotenv import load_dotenv
 from aiofiles import open as aiopen
-from aioshutil import rmtree as aiormtree
+from aioshutil import rmtree
 from aiofiles.os import path as aiopath
 from aiofiles.os import remove, rename
 from pyrogram.enums import ChatType
@@ -15,391 +20,50 @@
 from pyrogram.handlers import MessageHandler, CallbackQueryHandler

 from bot import (
-    LOGGER,
-    DATABASE_URL,
+    DRIVES_IDS,
+    INDEX_URLS,
+    DRIVES_NAMES,
     GLOBAL_EXTENSION_FILTER,
-    Interval,
+    Intervals,
     bot,
-    aria2,
+    task_dict,
+    user_data,
     config_dict,
-    aria2_options,
-    download_dict,
-    extra_buttons,
-    shorteners_list,
-    list_drives_dict,
-    status_reply_dict_lock,
 )
 from bot.modules.torrent_search import initiate_search_tools
-from bot.helper.ext_utils.bot_utils import SetInterval, new_thread, sync_to_async
-from bot.helper.ext_utils.db_handler import DbManager
-from
bot.helper.ext_utils.help_strings import bset_display_dict +from bot.helper.ext_utils.bot_utils import ( + new_thread, + setInterval, +) +from bot.helper.ext_utils.db_handler import Database from bot.helper.ext_utils.task_manager import start_from_queued from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( sendFile, + delete_links, edit_message, send_message, - update_all_messages, + delete_message, + update_status_message, ) START = 0 STATE = "view" handler_dict = {} + default_values = { - "DEFAULT_UPLOAD": "gd", - "SEARCH_LIMIT": 0, "UPSTREAM_BRANCH": "main", - "TORRENT_TIMEOUT": 3000, + "DEFAULT_UPLOAD": "gd", } -bool_vars = [ - "AS_DOCUMENT", - "DELETE_LINKS", - "STOP_DUPLICATE", - "SET_COMMANDS", - "SHOW_MEDIAINFO", - "USE_SERVICE_ACCOUNTS", -] - - -async def load_config(): - BOT_TOKEN = environ.get("BOT_TOKEN", "") - if len(BOT_TOKEN) == 0: - BOT_TOKEN = config_dict["BOT_TOKEN"] - - TELEGRAM_API = environ.get("TELEGRAM_API", "") - if len(TELEGRAM_API) == 0: - TELEGRAM_API = config_dict["TELEGRAM_API"] - else: - TELEGRAM_API = int(TELEGRAM_API) - - TELEGRAM_HASH = environ.get("TELEGRAM_HASH", "") - if len(TELEGRAM_HASH) == 0: - TELEGRAM_HASH = config_dict["TELEGRAM_HASH"] - - BOT_MAX_TASKS = environ.get("BOT_MAX_TASKS", "") - BOT_MAX_TASKS = int(BOT_MAX_TASKS) if BOT_MAX_TASKS.isdigit() else "" - - OWNER_ID = environ.get("OWNER_ID", "") - OWNER_ID = config_dict["OWNER_ID"] if len(OWNER_ID) == 0 else int(OWNER_ID) - - GROUPS_EMAIL = environ.get("GROUPS_EMAIL", "") - if len(GROUPS_EMAIL) != 0: - GROUPS_EMAIL = GROUPS_EMAIL.lower() - - DATABASE_URL = environ.get("DATABASE_URL", "") - if len(DATABASE_URL) == 0: - DATABASE_URL = "" - - GDRIVE_ID = environ.get("GDRIVE_ID", "") - if len(GDRIVE_ID) == 0: - GDRIVE_ID = "" - - RCLONE_PATH = environ.get("RCLONE_PATH", "") - if len(RCLONE_PATH) == 0: - RCLONE_PATH = "" - - DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") - if DEFAULT_UPLOAD != "rc": - DEFAULT_UPLOAD = "gd" - - RCLONE_FLAGS = environ.get("RCLONE_FLAGS", "") - if len(RCLONE_FLAGS) == 0: - RCLONE_FLAGS = "" - - EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") - if len(EXTENSION_FILTER) > 0: - fx = EXTENSION_FILTER.split() - GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append("aria2") - for x in fx: - if x.strip().startswith("."): - clean_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(clean_x.strip().lower()) - - METADATA_KEY = environ.get("METADATA_KEY", "") - if len(METADATA_KEY) == 0: - METADATA_KEY = "" - - MEGA_EMAIL = environ.get("MEGA_EMAIL", "") - MEGA_PASSWORD = environ.get("MEGA_PASSWORD", "") - if len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0: - MEGA_EMAIL = "" - MEGA_PASSWORD = "" - - INDEX_URL = environ.get("INDEX_URL", "").rstrip("/") - if len(INDEX_URL) == 0: - INDEX_URL = "" - - SEARCH_API_LINK = environ.get("SEARCH_API_LINK", "").rstrip("/") - if len(SEARCH_API_LINK) == 0: - SEARCH_API_LINK = "" - - FILELION_API = environ.get("FILELION_API", "") - if len(FILELION_API) == 0: - FILELION_API = "" - - LEECH_LOG_ID = environ.get("LEECH_LOG_ID", "") - LEECH_LOG_ID = "" if len(LEECH_LOG_ID) == 0 else int(LEECH_LOG_ID) - - if len(download_dict) != 0: - async with status_reply_dict_lock: - if Interval: - Interval[0].cancel() - Interval.clear() - Interval.append(SetInterval(1, update_all_messages)) - - YT_DLP_OPTIONS = environ.get("YT_DLP_OPTIONS", "") - if 
len(YT_DLP_OPTIONS) == 0: - YT_DLP_OPTIONS = "" - - SEARCH_LIMIT = environ.get("SEARCH_LIMIT", "") - SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT) - - LEECH_DUMP_ID = environ.get("LEECH_DUMP_ID", "") - if len(LEECH_DUMP_ID) == 0: - LEECH_DUMP_ID = "" - - CMD_SUFFIX = environ.get("CMD_SUFFIX", "") - - USER_SESSION_STRING = environ.get("USER_SESSION_STRING", "") - - TORRENT_TIMEOUT = environ.get("TORRENT_TIMEOUT", "") - TORRENT_TIMEOUT = 3000 if len(TORRENT_TIMEOUT) == 0 else int(TORRENT_TIMEOUT) - downloads = aria2.get_downloads() - if len(TORRENT_TIMEOUT) == 0: - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": "0"}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = "0" - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", "0") - TORRENT_TIMEOUT = "" - else: - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": TORRENT_TIMEOUT}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = TORRENT_TIMEOUT - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", TORRENT_TIMEOUT) - TORRENT_TIMEOUT = int(TORRENT_TIMEOUT) - - QUEUE_ALL = environ.get("QUEUE_ALL", "") - QUEUE_ALL = "" if len(QUEUE_ALL) == 0 else int(QUEUE_ALL) - - QUEUE_DOWNLOAD = environ.get("QUEUE_DOWNLOAD", "") - QUEUE_DOWNLOAD = "" if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD) - - QUEUE_UPLOAD = environ.get("QUEUE_UPLOAD", "") - QUEUE_UPLOAD = "" if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD) - - STREAMWISH_API = environ.get("STREAMWISH_API", "") - if len(STREAMWISH_API) == 0: - STREAMWISH_API = "" - - STOP_DUPLICATE = environ.get("STOP_DUPLICATE", "") - STOP_DUPLICATE = STOP_DUPLICATE.lower() == "true" - - USE_SERVICE_ACCOUNTS = environ.get("USE_SERVICE_ACCOUNTS", "") - USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == "true" - - AS_DOCUMENT = environ.get("AS_DOCUMENT", "") - AS_DOCUMENT = AS_DOCUMENT.lower() == "true" - - SHOW_MEDIAINFO = environ.get("SHOW_MEDIAINFO", "") - SHOW_MEDIAINFO = SHOW_MEDIAINFO.lower() == "true" - - MEDIA_GROUP = environ.get("MEDIA_GROUP", "") - MEDIA_GROUP = MEDIA_GROUP.lower() == "true" - - await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait() - BASE_URL = environ.get("BASE_URL", "").rstrip("/") - if len(BASE_URL) == 0: - BASE_URL = "" - else: - await create_subprocess_shell( - "gunicorn web.wserver:app --bind 0.0.0.0:80 --worker-class gevent" - ) - - UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "") - if len(UPSTREAM_REPO) == 0: - UPSTREAM_REPO = "" - - UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "") - if len(UPSTREAM_BRANCH) == 0: - UPSTREAM_BRANCH = "main" - - TORRENT_LIMIT = environ.get("TORRENT_LIMIT", "") - TORRENT_LIMIT = "" if len(TORRENT_LIMIT) == 0 else float(TORRENT_LIMIT) - - DIRECT_LIMIT = environ.get("DIRECT_LIMIT", "") - DIRECT_LIMIT = "" if len(DIRECT_LIMIT) == 0 else float(DIRECT_LIMIT) - - YTDLP_LIMIT = environ.get("YTDLP_LIMIT", "") - YTDLP_LIMIT = "" if len(YTDLP_LIMIT) == 0 else float(YTDLP_LIMIT) - - GDRIVE_LIMIT = environ.get("GDRIVE_LIMIT", "") - GDRIVE_LIMIT = "" if len(GDRIVE_LIMIT) == 0 else float(GDRIVE_LIMIT) - - CLONE_LIMIT = environ.get("CLONE_LIMIT", "") - CLONE_LIMIT = "" if len(CLONE_LIMIT) == 0 else float(CLONE_LIMIT) - - MEGA_LIMIT = environ.get("MEGA_LIMIT", "") - MEGA_LIMIT = "" if len(MEGA_LIMIT) == 0 
else float(MEGA_LIMIT) - - LEECH_LIMIT = environ.get("LEECH_LIMIT", "") - LEECH_LIMIT = "" if len(LEECH_LIMIT) == 0 else float(LEECH_LIMIT) - - DELETE_LINKS = environ.get("DELETE_LINKS", "") - DELETE_LINKS = DELETE_LINKS.lower() == "true" - - FSUB_IDS = environ.get("FSUB_IDS", "") - if len(FSUB_IDS) == 0: - FSUB_IDS = "" - - MIRROR_LOG_ID = environ.get("MIRROR_LOG_ID", "") - if len(MIRROR_LOG_ID) == 0: - MIRROR_LOG_ID = "" - - ATTACHMENT_URL = environ.get("ATTACHMENT_URL", "") - if len(ATTACHMENT_URL) == 0: - ATTACHMENT_URL = "" - - USER_MAX_TASKS = environ.get("USER_MAX_TASKS", "") - USER_MAX_TASKS = "" if len(USER_MAX_TASKS) == 0 else int(USER_MAX_TASKS) - - PLAYLIST_LIMIT = environ.get("PLAYLIST_LIMIT", "") - PLAYLIST_LIMIT = "" if len(PLAYLIST_LIMIT) == 0 else int(PLAYLIST_LIMIT) - - IMAGES = environ.get("IMAGES", "") - IMAGES = ( - IMAGES.replace("'", "") - .replace('"', "") - .replace("[", "") - .replace("]", "") - .replace(",", "") - ).split() - - SET_COMMANDS = environ.get("SET_COMMANDS", "") - SET_COMMANDS = SET_COMMANDS.lower() == "true" - - TOKEN_TIMEOUT = environ.get("TOKEN_TIMEOUT", "") - TOKEN_TIMEOUT = int(TOKEN_TIMEOUT) if TOKEN_TIMEOUT.isdigit() else "" - - list_drives_dict.clear() - - if GDRIVE_ID: - list_drives_dict["Main"] = {"drive_id": GDRIVE_ID, "index_link": INDEX_URL} - - if await aiopath.exists("list_drives.txt"): - async with aiopen("list_drives.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - - extra_buttons.clear() - if await aiopath.exists("buttons.txt"): - async with aiopen("buttons.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - - shorteners_list.clear() - if await aiopath.exists("shorteners.txt"): - async with aiopen("shorteners.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append({"domain": temp[0], "api_key": temp[1]}) - - config_dict.update( - { - "AS_DOCUMENT": AS_DOCUMENT, - "BASE_URL": BASE_URL, - "BOT_TOKEN": BOT_TOKEN, - "BOT_MAX_TASKS": BOT_MAX_TASKS, - "CMD_SUFFIX": CMD_SUFFIX, - "DATABASE_URL": DATABASE_URL, - "DEFAULT_UPLOAD": DEFAULT_UPLOAD, - "DELETE_LINKS": DELETE_LINKS, - "TORRENT_LIMIT": TORRENT_LIMIT, - "DIRECT_LIMIT": DIRECT_LIMIT, - "YTDLP_LIMIT": YTDLP_LIMIT, - "GDRIVE_LIMIT": GDRIVE_LIMIT, - "CLONE_LIMIT": CLONE_LIMIT, - "MEGA_LIMIT": MEGA_LIMIT, - "LEECH_LIMIT": LEECH_LIMIT, - "FSUB_IDS": FSUB_IDS, - "FILELION_API": FILELION_API, - "USER_MAX_TASKS": USER_MAX_TASKS, - "PLAYLIST_LIMIT": PLAYLIST_LIMIT, - "MIRROR_LOG_ID": MIRROR_LOG_ID, - "LEECH_DUMP_ID": LEECH_DUMP_ID, - "IMAGES": IMAGES, - "EXTENSION_FILTER": EXTENSION_FILTER, - "ATTACHMENT_URL": ATTACHMENT_URL, - "GDRIVE_ID": GDRIVE_ID, - "INDEX_URL": INDEX_URL, - "LEECH_LOG_ID": LEECH_LOG_ID, - "TOKEN_TIMEOUT": TOKEN_TIMEOUT, - "MEDIA_GROUP": MEDIA_GROUP, - "MEGA_EMAIL": MEGA_EMAIL, - "MEGA_PASSWORD": MEGA_PASSWORD, - "METADATA_KEY": METADATA_KEY, - "OWNER_ID": OWNER_ID, - "QUEUE_ALL": QUEUE_ALL, - "QUEUE_DOWNLOAD": QUEUE_DOWNLOAD, - "QUEUE_UPLOAD": QUEUE_UPLOAD, - "RCLONE_FLAGS": RCLONE_FLAGS, - "RCLONE_PATH": RCLONE_PATH, - 
"SEARCH_API_LINK": SEARCH_API_LINK, - "SEARCH_LIMIT": SEARCH_LIMIT, - "SET_COMMANDS": SET_COMMANDS, - "SHOW_MEDIAINFO": SHOW_MEDIAINFO, - "STOP_DUPLICATE": STOP_DUPLICATE, - "STREAMWISH_API": STREAMWISH_API, - "TELEGRAM_API": TELEGRAM_API, - "TELEGRAM_HASH": TELEGRAM_HASH, - "TORRENT_TIMEOUT": TORRENT_TIMEOUT, - "UPSTREAM_REPO": UPSTREAM_REPO, - "UPSTREAM_BRANCH": UPSTREAM_BRANCH, - "USER_SESSION_STRING": USER_SESSION_STRING, - "GROUPS_EMAIL": GROUPS_EMAIL, - "USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS, - "YT_DLP_OPTIONS": YT_DLP_OPTIONS, - } - ) - if DATABASE_URL: - await DbManager().update_config(config_dict) - await initiate_search_tools() - await start_from_queued() +boolean_variables = { + "STOP_DUPLICATE", + "IS_TEAM_DRIVE", + "USE_SA", + "AS_DOCUMENT", +} async def get_buttons(key=None, edit_type=None, edit_mode=None, mess=None): @@ -424,46 +88,33 @@ async def get_buttons(key=None, edit_type=None, edit_mode=None, mess=None): elif key == "private": buttons.callback("Back", "botset back") buttons.callback("Close", "botset close") - msg = "Send private files: config.env, token.pickle, cookies.txt, accounts.zip, terabox.txt, .netrc, or any other files!\n\nTo delete a private file, send only the file name as a text message.\n\nPlease note: Changes to .netrc will not take effect for aria2c until it's restarted.\n\nTimeout: 60 seconds" + msg = "Send private files" elif edit_type == "editvar": msg = f"Variable:{key}
\n\n" - msg += f'Description: {bset_display_dict.get(key, "No Description Provided")}\n\n' if mess.chat.type == ChatType.PRIVATE: msg += f'Value:{config_dict.get(key, "None")}
\n\n' - elif key not in bool_vars: + elif key not in boolean_variables: buttons.callback( "View value", f"botset showvar {key}", position="header" ) buttons.callback("Back", "botset back var", position="footer") - if key not in bool_vars: + if key not in boolean_variables: if not edit_mode: buttons.callback("Edit Value", f"botset editvar {key} edit") else: buttons.callback("Stop Edit", f"botset editvar {key}") - if ( - key not in ["TELEGRAM_HASH", "TELEGRAM_API", "OWNER_ID", "BOT_TOKEN"] - and key not in bool_vars - ): buttons.callback("Reset", f"botset resetvar {key}") buttons.callback("Close", "botset close", position="footer") - if edit_mode and key in [ - "CMD_SUFFIX", - "OWNER_ID", - "USER_SESSION_STRING", - "TELEGRAM_HASH", - "TELEGRAM_API", - "DATABASE_URL", - "BOT_TOKEN", - ]: + if edit_mode and key in ["CMD_SUFFIX", "USER_SESSION_STRING"]: msg += "Note: Restart required for this edit to take effect!\n\n" - if edit_mode and key not in bool_vars: + if edit_mode and key not in boolean_variables: msg += "Send a valid value for the above Var. Timeout: 60 sec" - if key in bool_vars: + if key in boolean_variables: if not config_dict.get(key): buttons.callback("Make it True", f"botset boolvar {key} on") else: buttons.callback("Make it False", f"botset boolvar {key} off") - button = buttons.column(1) if key is None else buttons.column(2) + button = buttons.menu(1) if key is None else buttons.menu(2) return msg, button @@ -475,50 +126,30 @@ async def update_buttons(message, key=None, edit_type=None, edit_mode=None): async def edit_variable(_, message, pre_message, key): handler_dict[message.chat.id] = False value = message.text - if key == "LEECH_LOG_ID": - value = int(value) - elif key == "TORRENT_TIMEOUT": - value = int(value) - downloads = await sync_to_async(aria2.get_downloads) - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": f"{value}"}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = f"{value}" - elif key == "EXTENSION_FILTER": + if key == "EXTENSION_FILTER": fx = value.split() GLOBAL_EXTENSION_FILTER.clear() GLOBAL_EXTENSION_FILTER.append(".aria2") for x in fx: - if x.strip().startswith("."): - clean_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(clean_x.strip().lower()) + x = x.lstrip(".") + GLOBAL_EXTENSION_FILTER.append(x.strip().lower()) elif key == "GDRIVE_ID": - list_drives_dict["Main"] = { - "drive_id": value, - "index_link": config_dict["INDEX_URL"], - } + if DRIVES_NAMES and DRIVES_NAMES[0] == "Main": + DRIVES_IDS[0] = value + else: + DRIVES_IDS.insert(0, value) elif key == "INDEX_URL": - list_drives_dict["Main"] = { - "drive_id": config_dict["GDRIVE_ID"], - "index_link": value, - } + if DRIVES_NAMES and DRIVES_NAMES[0] == "Main": + INDEX_URLS[0] = value + else: + INDEX_URLS.insert(0, value) elif value.isdigit(): value = int(value) config_dict[key] = value await update_buttons(pre_message, key, "editvar", False) await message.delete() - if DATABASE_URL: - await DbManager().update_config({key: value}) - if key == "SEARCH_API_LINK": - await initiate_search_tools() - elif key in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: + await Database().update_config({key: value}) + if key in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: await start_from_queued() @@ -530,29 +161,26 @@ async def update_private_file(_, message, pre_message): await remove(fn) if fn == "accounts": if await aiopath.exists("accounts"): - await 
aiormtree("accounts") + await rmtree("accounts", ignore_errors=True) if await aiopath.exists("rclone_sa"): - await aiormtree("rclone_sa") - config_dict["USE_SERVICE_ACCOUNTS"] = False - if DATABASE_URL: - await DbManager().update_config({"USE_SERVICE_ACCOUNTS": False}) + await rmtree("rclone_sa", ignore_errors=True) + config_dict["USE_SA"] = False + await Database().update_config({"USE_SA": False}) elif file_name in [".netrc", "netrc"]: await (await create_subprocess_exec("touch", ".netrc")).wait() await (await create_subprocess_exec("chmod", "600", ".netrc")).wait() await ( await create_subprocess_exec("cp", ".netrc", "/root/.netrc") ).wait() - elif file_name in ["buttons.txt", "buttons"]: - extra_buttons.clear() - await message.delete() + await delete_message(message) elif doc := message.document: file_name = doc.file_name await message.download(file_name=f"{getcwd()}/{file_name}") if file_name == "accounts.zip": if await aiopath.exists("accounts"): - await aiormtree("accounts") + await rmtree("accounts", ignore_errors=True) if await aiopath.exists("rclone_sa"): - await aiormtree("rclone_sa") + await rmtree("rclone_sa", ignore_errors=True) await ( await create_subprocess_exec( "7z", "x", "-o.", "-aoa", "accounts.zip", "accounts/*.json" @@ -562,42 +190,23 @@ async def update_private_file(_, message, pre_message): await create_subprocess_exec("chmod", "-R", "777", "accounts") ).wait() elif file_name == "list_drives.txt": - list_drives_dict.clear() + DRIVES_IDS.clear() + DRIVES_NAMES.clear() + INDEX_URLS.clear() if GDRIVE_ID := config_dict["GDRIVE_ID"]: - list_drives_dict["Main"] = { - "drive_id": GDRIVE_ID, - "index_link": config_dict["INDEX_URL"], - } + DRIVES_NAMES.append("Main") + DRIVES_IDS.append(GDRIVE_ID) + INDEX_URLS.append(config_dict["INDEX_URL"]) async with aiopen("list_drives.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - elif file_name == "buttons.txt": - extra_buttons.clear() - async with aiopen("buttons.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - elif file_name == "shorteners.txt": - shorteners_list.clear() - async with aiopen("shorteners.txt", "r+") as f: lines = await f.readlines() for line in lines: temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append( - {"domain": temp[0], "api_key": temp[1]} - ) + DRIVES_IDS.append(temp[1]) + DRIVES_NAMES.append(temp[0].replace("_", " ")) + if len(temp) > 2: + INDEX_URLS.append(temp[2]) + else: + INDEX_URLS.append("") elif file_name in [".netrc", "netrc"]: if file_name == "netrc": await rename("netrc", ".netrc") @@ -609,10 +218,9 @@ async def update_private_file(_, message, pre_message): elif file_name == "config.env": load_dotenv("config.env", override=True) await load_config() - await message.delete() + await delete_message(message) await update_buttons(pre_message) - if DATABASE_URL: - await DbManager().update_private_file(file_name) + await Database().update_private_file(file_name) if await aiopath.exists("accounts.zip"): await remove("accounts.zip") @@ -648,8 +256,7 @@ async def edit_bot_settings(client, query): if data[1] == "close": 
handler_dict[message.chat.id] = False await query.answer() - await message.delete() - await message.reply_to_message.delete() + await delete_links(message) elif data[1] == "back": handler_dict[message.chat.id] = False await query.answer() @@ -669,32 +276,22 @@ async def edit_bot_settings(client, query): elif data[2] == "EXTENSION_FILTER": GLOBAL_EXTENSION_FILTER.clear() GLOBAL_EXTENSION_FILTER.append(".aria2") - elif data[2] == "TORRENT_TIMEOUT": - downloads = await sync_to_async(aria2.get_downloads) - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": "0"}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = "0" - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", "0") elif data[2] == "BASE_URL": await ( await create_subprocess_exec("pkill", "-9", "-f", "gunicorn") ).wait() + elif data[2] == "GDRIVE_ID": + if DRIVES_NAMES and DRIVES_NAMES[0] == "Main": + DRIVES_NAMES.pop(0) + DRIVES_IDS.pop(0) + INDEX_URLS.pop(0) + elif data[2] == "INDEX_URL": + if DRIVES_NAMES and DRIVES_NAMES[0] == "Main": + INDEX_URLS[0] = "" config_dict[data[2]] = value await update_buttons(message, data[2], "editvar", False) - if DATABASE_URL: - await DbManager().update_config({data[2]: value}) - if data[2] == "SEARCH_API_LINK": - await initiate_search_tools() - elif data[2] in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: + await Database().update_config({data[2]: value}) + if data[2] in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: await start_from_queued() elif data[1] == "private": handler_dict[message.chat.id] = False @@ -711,14 +308,13 @@ async def edit_bot_settings(client, query): ) config_dict[data[2]] = value await update_buttons(message, data[2], "editvar", False) - if DATABASE_URL: - await DbManager().update_config({data[2]: value}) + await Database().update_config({data[2]: value}) elif data[1] == "editvar": handler_dict[message.chat.id] = False await query.answer() edit_mode = len(data) == 4 await update_buttons(message, data[2], data[1], edit_mode) - if data[2] in bool_vars or not edit_mode: + if data[2] in boolean_variables or not edit_mode: return pfunc = partial(edit_variable, pre_message=message, key=data[2]) rfunc = partial(update_buttons, message, data[2], data[1], edit_mode) @@ -755,6 +351,150 @@ async def bot_settings(_, message): await send_message(message, msg, button) +async def load_config(): + GDRIVE_ID = environ.get("GDRIVE_ID", "") + RCLONE_PATH = environ.get("RCLONE_PATH", "") + DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") + if DEFAULT_UPLOAD != "rc": + DEFAULT_UPLOAD = "gd" + + RCLONE_FLAGS = environ.get("RCLONE_FLAGS", "") + AUTHORIZED_CHATS = environ.get("AUTHORIZED_CHATS", "") + if len(AUTHORIZED_CHATS) != 0: + aid = AUTHORIZED_CHATS.split() + for id_ in aid: + user_data[int(id_.strip())] = {"is_auth": True} + + SUDO_USERS = environ.get("SUDO_USERS", "") + if len(SUDO_USERS) != 0: + aid = SUDO_USERS.split() + for id_ in aid: + user_data[int(id_.strip())] = {"is_sudo": True} + + EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") + if len(EXTENSION_FILTER) > 0: + fx = EXTENSION_FILTER.split() + GLOBAL_EXTENSION_FILTER.clear() + GLOBAL_EXTENSION_FILTER.extend(["aria2", "!qB"]) + for x in fx: + if x.strip().startswith("."): + x = x.lstrip(".") + GLOBAL_EXTENSION_FILTER.append(x.strip().lower()) + + FILELION_API = environ.get("FILELION_API", "") + STREAMWISH_API = environ.get("STREAMWISH_API", "") + INDEX_URL = 
environ.get("INDEX_URL", "").rstrip("/") + + if len(task_dict) != 0 and (st := Intervals["status"]): + for key, intvl in list(st.items()): + intvl.cancel() + Intervals["status"][key] = setInterval(1, update_status_message, key) + + YT_DLP_OPTIONS = environ.get("YT_DLP_OPTIONS", "") + LEECH_DUMP_CHAT = environ.get("LEECH_DUMP_CHAT", "") + LEECH_DUMP_CHAT = "" if len(LEECH_DUMP_CHAT) == 0 else int(LEECH_DUMP_CHAT) + + LOG_CHAT = environ.get("LOG_CHAT", "") + LOG_CHAT = "" if len(LOG_CHAT) == 0 else int(LOG_CHAT) + + CMD_SUFFIX = environ.get("CMD_SUFFIX", "") + FSUB_IDS = environ.get("FSUB_IDS", "") + USER_SESSION_STRING = environ.get("USER_SESSION_STRING", "") + MEGA_EMAIL = environ.get("MEGA_EMAIL", "") + MEGA_PASSWORD = environ.get("MEGA_PASSWORD", "") + PAID_CHAT_ID = environ.get("PAID_CHAT_ID", "") + PAID_CHAT_ID = int(PAID_CHAT_ID) if PAID_CHAT_ID else "" + PAID_CHAT_LINK = environ.get("PAID_CHAT_LINK", "") + QUEUE_ALL = environ.get("QUEUE_ALL", "") + QUEUE_ALL = "" if len(QUEUE_ALL) == 0 else int(QUEUE_ALL) + + TOKEN_TIMEOUT = environ.get("TOKEN_TIMEOUT", "") + TOKEN_TIMEOUT = int(TOKEN_TIMEOUT) if TOKEN_TIMEOUT.isdigit() else "" + + QUEUE_DOWNLOAD = environ.get("QUEUE_DOWNLOAD", "") + QUEUE_DOWNLOAD = "" if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD) + + QUEUE_UPLOAD = environ.get("QUEUE_UPLOAD", "") + QUEUE_UPLOAD = "" if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD) + + STOP_DUPLICATE = environ.get("STOP_DUPLICATE", "").lower() == "true" + + IS_TEAM_DRIVE = environ.get("IS_TEAM_DRIVE", "").lower() == "true" + + USE_SA = environ.get("USE_SA", "").lower() == "true" + + AS_DOCUMENT = environ.get("AS_DOCUMENT", "").lower() == "true" + + await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait() + BASE_URL = environ.get("BASE_URL", "").rstrip("/") + if len(BASE_URL) == 0: + BASE_URL = "" + else: + await create_subprocess_shell( + "gunicorn web.wserver:app --bind 0.0.0.0:80 --worker-class gevent" + ) + + UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "main") + DRIVES_IDS.clear() + DRIVES_NAMES.clear() + INDEX_URLS.clear() + + if GDRIVE_ID: + DRIVES_NAMES.append("Main") + DRIVES_IDS.append(GDRIVE_ID) + INDEX_URLS.append(INDEX_URL) + + if await aiopath.exists("list_drives.txt"): + async with aiopen("list_drives.txt", "r+") as f: + lines = await f.readlines() + for line in lines: + temp = line.strip().split() + DRIVES_IDS.append(temp[1]) + DRIVES_NAMES.append(temp[0].replace("_", " ")) + if len(temp) > 2: + INDEX_URLS.append(temp[2]) + else: + INDEX_URLS.append("") + + config_dict.update( + { + "AS_DOCUMENT": AS_DOCUMENT, + "AUTHORIZED_CHATS": AUTHORIZED_CHATS, + "BASE_URL": BASE_URL, + "CMD_SUFFIX": CMD_SUFFIX, + "DEFAULT_UPLOAD": DEFAULT_UPLOAD, + "EXTENSION_FILTER": EXTENSION_FILTER, + "FILELION_API": FILELION_API, + "FSUB_IDS": FSUB_IDS, + "GDRIVE_ID": GDRIVE_ID, + "INDEX_URL": INDEX_URL, + "IS_TEAM_DRIVE": IS_TEAM_DRIVE, + "LEECH_DUMP_CHAT": LEECH_DUMP_CHAT, + "LOG_CHAT": LOG_CHAT, + "MEGA_EMAIL": MEGA_EMAIL, + "MEGA_PASSWORD": MEGA_PASSWORD, + "PAID_CHAT_ID": PAID_CHAT_ID, + "PAID_CHAT_LINK": PAID_CHAT_LINK, + "QUEUE_ALL": QUEUE_ALL, + "QUEUE_DOWNLOAD": QUEUE_DOWNLOAD, + "QUEUE_UPLOAD": QUEUE_UPLOAD, + "RCLONE_FLAGS": RCLONE_FLAGS, + "RCLONE_PATH": RCLONE_PATH, + "STOP_DUPLICATE": STOP_DUPLICATE, + "STREAMWISH_API": STREAMWISH_API, + "SUDO_USERS": SUDO_USERS, + "TOKEN_TIMEOUT": TOKEN_TIMEOUT, + "UPSTREAM_BRANCH": UPSTREAM_BRANCH, + "USER_SESSION_STRING": USER_SESSION_STRING, + "USE_SA": USE_SA, + "YT_DLP_OPTIONS": YT_DLP_OPTIONS, + } + ) + + await 
Database().update_config(config_dict) + await gather(initiate_search_tools(), start_from_queued()) + + bot.add_handler( MessageHandler( bot_settings, filters=command(BotCommands.BotSetCommand) & CustomFilters.sudo diff --git a/bot/modules/broadcast.py b/bot/modules/broadcast.py index 262edc76c..3b5353a7b 100644 --- a/bot/modules/broadcast.py +++ b/bot/modules/broadcast.py @@ -5,9 +5,10 @@ from pyrogram.filters import command from pyrogram.handlers import MessageHandler -from bot import DATABASE_URL, bot -from bot.helper.ext_utils.bot_utils import new_task, get_readable_time -from bot.helper.ext_utils.db_handler import DbManager +from bot import bot +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.db_handler import Database +from bot.helper.ext_utils.status_utils import get_readable_time from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.message_utils import edit_message, send_message @@ -15,10 +16,6 @@ @new_task async def broadcast(_, message): - if not DATABASE_URL: - await send_message(message, "DATABASE_URL not provided!") - return - if not message.reply_to_message: await send_message( message, "Reply to any message to broadcast messages to users in Bot PM." @@ -30,7 +27,7 @@ async def broadcast(_, message): updater = time() broadcast_message = await send_message(message, "Broadcast in progress...") - for uid in await DbManager().get_pm_uids(): + for uid in await Database().get_pm_uids(): try: await message.reply_to_message.copy(uid) successful += 1 @@ -39,7 +36,7 @@ async def broadcast(_, message): await message.reply_to_message.copy(uid) successful += 1 except (UserIsBlocked, InputUserDeactivated): - await DbManager().rm_pm_user(uid) + await Database().rm_pm_user(uid) blocked += 1 except Exception: unsuccessful += 1 diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py deleted file mode 100644 index 97f6268e9..000000000 --- a/bot/modules/cancel_mirror.py +++ /dev/null @@ -1,121 +0,0 @@ -from asyncio import sleep - -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import OWNER_ID, bot, bot_name, user_data, download_dict, download_dict_lock -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - new_task, - get_all_task, - get_task_by_gid, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - send_message, - delete_message, - one_minute_del, -) - - -@new_task -async def cancel_mirror(_, message): - user_id = message.from_user.id - msg = message.text.split("_", maxsplit=1) - await delete_message(message) - - if len(msg) > 1: - cmd_data = msg[1].split("@", maxsplit=1) - if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name: - return - gid = cmd_data[0] - dl = await get_task_by_gid(gid) - if dl is None: - await delete_message(message) - return - elif reply_to_id := message.reply_to_message_id: - async with download_dict_lock: - dl = download_dict.get(reply_to_id, None) - if dl is None: - await delete_message(message) - return - elif len(msg) == 1: - await delete_message(message) - return - - if user_id not in (OWNER_ID, dl.message.from_user.id) and ( - user_id not in user_data or not user_data[user_id].get("is_sudo") - ): - await delete_message(message) - 
return - - obj = dl.download() - await obj.cancel_download() - - -async def cancel_all(status): - matches = await get_all_task(status) - if not matches: - return False - for dl in matches: - obj = dl.download() - await obj.cancel_download() - await sleep(1) - return True - - -async def cancell_all_buttons(_, message): - async with download_dict_lock: - count = len(download_dict) - if count == 0: - await send_message(message, "No active tasks!") - return - - buttons = ButtonMaker() - buttons.callback("Downloading", f"stopall {MirrorStatus.STATUS_DOWNLOADING}") - buttons.callback("Uploading", f"stopall {MirrorStatus.STATUS_UPLOADING}") - buttons.callback("Seeding", f"stopall {MirrorStatus.STATUS_SEEDING}") - buttons.callback("Cloning", f"stopall {MirrorStatus.STATUS_CLONING}") - buttons.callback("Extracting", f"stopall {MirrorStatus.STATUS_EXTRACTING}") - buttons.callback("Archiving", f"stopall {MirrorStatus.STATUS_ARCHIVING}") - buttons.callback("QueuedDl", f"stopall {MirrorStatus.STATUS_QUEUEDL}") - buttons.callback("QueuedUp", f"stopall {MirrorStatus.STATUS_QUEUEUP}") - buttons.callback("Paused", f"stopall {MirrorStatus.STATUS_PAUSED}") - buttons.callback("All", "stopall all") - buttons.callback("Close", "stopall close") - button = buttons.column(2) - can_msg = await send_message(message, "Choose tasks to cancel.", button) - await delete_message(message) - await one_minute_del(can_msg) - - -@new_task -async def cancel_all_update(_, query): - data = query.data.split() - message = query.message - reply_to = message.reply_to_message - await query.answer() - if data[1] == "close": - await delete_message(reply_to) - await delete_message(message) - else: - res = await cancel_all(data[1]) - if not res: - await send_message(reply_to, f"No matching tasks for {data[1]}!") - - -bot.add_handler( - MessageHandler( - cancel_mirror, - filters=regex(r"^/stop(_\w+)?(?!all)") & CustomFilters.authorized, - ) -) -bot.add_handler( - MessageHandler( - cancell_all_buttons, - filters=command(BotCommands.StopAllCommand) & CustomFilters.sudo, - ) -) -bot.add_handler(CallbackQueryHandler(cancel_all_update, filters=regex(r"^stopall"))) diff --git a/bot/modules/cancel_task.py b/bot/modules/cancel_task.py new file mode 100644 index 000000000..541be7987 --- /dev/null +++ b/bot/modules/cancel_task.py @@ -0,0 +1,179 @@ +from asyncio import sleep + +from pyrogram.filters import regex, command +from pyrogram.handlers import MessageHandler, CallbackQueryHandler + +from bot import OWNER_ID, bot, task_dict, user_data, multi_tags, task_dict_lock +from bot.helper.telegram_helper import button_build +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.status_utils import MirrorStatus, getAllTasks, getTaskByGid +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.message_utils import ( + edit_message, + send_message, + delete_message, + auto_delete_message, +) + + +async def cancel_task(_, message): + user_id = message.from_user.id if message.from_user else message.sender_chat.id + msg = message.text.split("_", maxsplit=1) + await delete_message(message) + if len(msg) > 1: + gid = msg[1].split("@", maxsplit=1) + gid = gid[0] + if len(gid) == 4: + multi_tags.discard(gid) + return + task = await getTaskByGid(gid) + if task is None: + await delete_message(message) + return + elif reply_to_id := message.reply_to_message_id: + async with task_dict_lock: + task = task_dict.get(reply_to_id) + if task is 
None:
+            return
+    elif len(msg) == 1:
+        return
+    if user_id not in (OWNER_ID, task.listener.userId) and (
+        user_id not in user_data or not user_data[user_id].get("is_sudo")
+    ):
+        return
+    obj = task.task()
+    await obj.cancel_task()
+
+
+async def cancel_multi(_, query):
+    data = query.data.split()
+    user_id = query.from_user.id
+    if user_id != int(data[1]) and not await CustomFilters.sudo("", query):
+        await query.answer("Not Yours!", show_alert=True)
+        return
+    tag = int(data[2])
+    if tag in multi_tags:
+        multi_tags.discard(int(data[2]))
+        msg = "Stopped!"
+    else:
+        msg = "Already Stopped/Finished!"
+    await query.answer(msg, show_alert=True)
+    await delete_message(query.message)
+
+
+async def cancel_all(status, userId):
+    matches = await getAllTasks(status.strip(), userId)
+    if not matches:
+        return False
+    for task in matches:
+        obj = task.task()
+        await obj.cancel_task()
+        await sleep(2)
+    return True
+
+
+def create_cancel_buttons(isSudo, userId=""):
+    buttons = button_build.ButtonMaker()
+    buttons.callback(
+        "Downloading", f"canall ms {MirrorStatus.STATUS_DOWNLOADING} {userId}"
+    )
+    buttons.callback(
+        "Uploading", f"canall ms {MirrorStatus.STATUS_UPLOADING} {userId}"
+    )
+    buttons.callback("Seeding", f"canall ms {MirrorStatus.STATUS_SEEDING} {userId}")
+    buttons.callback(
+        "Splitting", f"canall ms {MirrorStatus.STATUS_SPLITTING} {userId}"
+    )
+    buttons.callback("Cloning", f"canall ms {MirrorStatus.STATUS_CLONING} {userId}")
+    buttons.callback(
+        "Extracting", f"canall ms {MirrorStatus.STATUS_EXTRACTING} {userId}"
+    )
+    buttons.callback(
+        "Archiving", f"canall ms {MirrorStatus.STATUS_ARCHIVING} {userId}"
+    )
+    buttons.callback("QueuedDl", f"canall ms {MirrorStatus.STATUS_QUEUEDL} {userId}")
+    buttons.callback("QueuedUp", f"canall ms {MirrorStatus.STATUS_QUEUEUP} {userId}")
+    buttons.callback(
+        "SampleVideo", f"canall ms {MirrorStatus.STATUS_SAMVID} {userId}"
+    )
+    buttons.callback(
+        "ConvertMedia", f"canall ms {MirrorStatus.STATUS_CONVERTING} {userId}"
+    )
+    buttons.callback("Paused", f"canall ms {MirrorStatus.STATUS_PAUSED} {userId}")
+    buttons.callback("All", f"canall ms All {userId}")
+    if isSudo:
+        if userId:
+            buttons.callback("All Added Tasks", f"canall bot ms {userId}")
+        else:
+            buttons.callback("My Tasks", f"canall user ms {userId}")
+    buttons.callback("Close", f"canall close ms {userId}")
+    return buttons.menu(2)
+
+
+async def cancell_all_buttons(_, message):
+    async with task_dict_lock:
+        count = len(task_dict)
+    if count == 0:
+        await send_message(message, "No active tasks!")
+        return
+    isSudo = await CustomFilters.sudo("", message)
+    button = create_cancel_buttons(isSudo, message.from_user.id)
+    can_msg = await send_message(message, "Choose tasks to cancel!", button)
+    await auto_delete_message(message, can_msg)
+
+
+@new_task
+async def cancel_all_update(_, query):
+    data = query.data.split()
+    message = query.message
+    reply_to = message.reply_to_message
+    userId = int(data[3]) if len(data) > 3 else ""
+    isSudo = await CustomFilters.sudo("", query)
+    if not isSudo and userId and userId != query.from_user.id:
+        await query.answer("Not Yours!", show_alert=True)
+    else:
+        await query.answer()
+    if data[1] == "close":
+        await delete_message(reply_to)
+        await delete_message(message)
+    elif data[1] == "back":
+        button = create_cancel_buttons(isSudo, userId)
+        await edit_message(message, "Choose tasks to cancel!", button)
+    elif data[1] == "bot":
+        button = create_cancel_buttons(isSudo, "")
+        await edit_message(message, "Choose tasks to cancel!", button)
+    elif data[1]
== "user": + button = create_cancel_buttons(isSudo, query.from_user.id) + await edit_message(message, "Choose tasks to cancel!", button) + elif data[1] == "ms": + buttons = button_build.ButtonMaker() + buttons.callback("Yes!", f"canall {data[2]} confirm {userId}") + buttons.callback("Back", f"canall back confirm {userId}") + buttons.callback("Close", f"canall close confirm {userId}") + button = buttons.menu(2) + await edit_message( + message, f"Are you sure you want to cancel all {data[2]} tasks", button + ) + else: + button = create_cancel_buttons(isSudo, userId) + await edit_message(message, "Choose tasks to cancel.", button) + res = await cancel_all(data[1], userId) + if not res: + await send_message(reply_to, f"No matching tasks for {data[1]}!") + + +bot.add_handler( + MessageHandler( + cancel_task, + filters=regex(r"^/stop(_\w+)?(?!all)") & CustomFilters.authorized, + ) +) +bot.add_handler( + MessageHandler( + cancell_all_buttons, + filters=command(BotCommands.CancelAllCommand) & CustomFilters.authorized, + ) +) +bot.add_handler(CallbackQueryHandler(cancel_all_update, filters=regex("^canall"))) +bot.add_handler(CallbackQueryHandler(cancel_multi, filters=regex("^stopm"))) diff --git a/bot/modules/clone.py b/bot/modules/clone.py index 17f19a783..820c72473 100644 --- a/bot/modules/clone.py +++ b/bot/modules/clone.py @@ -1,43 +1,40 @@ from json import loads -from asyncio import sleep, gather +from asyncio import gather from secrets import token_hex -from aiofiles.os import path as aiopath from pyrogram.filters import command from pyrogram.handlers import MessageHandler -from bot import LOGGER, bot, config_dict, download_dict, download_dict_lock +from bot import LOGGER, bot, task_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import ( + COMMAND_USAGE, cmd_exec, new_task, arg_parser, - is_share_link, sync_to_async, - fetch_user_tds, +) +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.links_utils import ( + is_gdrive_id, + is_share_link, is_gdrive_link, is_rclone_path, - get_telegraph_list, ) -from bot.helper.ext_utils.exceptions import DirectDownloadLinkError -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import CLONE_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils, limit_checker +from bot.helper.ext_utils.task_manager import stop_duplicate_check +from bot.helper.aeon_utils.access_check import error_check +from bot.helper.listeners.task_listener import TaskListener from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.message_utils import ( delete_links, - edit_message, send_message, delete_message, - one_minute_del, five_minute_del, sendStatusMessage, ) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList +from bot.helper.mirror_leech_utils.gdrive_utils.clone import gdClone +from bot.helper.mirror_leech_utils.gdrive_utils.count import gdCount from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus from 
bot.helper.mirror_leech_utils.download_utils.direct_link_generator import ( @@ -45,317 +42,264 @@ ) -async def rcloneNode(client, message, link, dst_path, rcf, tag): - if link == "rcl": - link = await RcloneList(client, message).get_rclone_path("rcd") - if not is_rclone_path(link): - await send_message(message, link) - return +class Clone(TaskListener): + def __init__( + self, + client, + message, + _=None, + __=None, + ___=None, + ____=None, + _____=None, + bulk=None, + multi_tag=None, + options="", + ): + if bulk is None: + bulk = [] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = {} + self.bulk = bulk + super().__init__() + self.isClone = True - if link.startswith("mrcc:"): - link = link.split("mrcc:", 1)[1] - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" + @new_task + async def new_event(self): + error_msg, error_button = await error_check(self.message) + if error_msg: + await delete_links(self.message) + error = await send_message(self.message, error_msg, error_button) + await five_minute_del(error) + return + text = self.message.text.split("\n") + input_list = text[0].split(" ") - if not await aiopath.exists(config_path): - await send_message(message, f"Rclone Config: {config_path} not Exists!") - return + args = { + "link": "", + "-i": 0, + "-b": False, + "-up": "", + "-rcf": "", + "-sync": False, + } - if dst_path == "rcl" or config_dict["RCLONE_PATH"] == "rcl": - dst_path = await RcloneList(client, message).get_rclone_path( - "rcu", config_path - ) - if not is_rclone_path(dst_path): - await send_message(message, dst_path) - return + arg_parser(input_list[1:], args) - dst_path = (dst_path or config_dict["RCLONE_PATH"]).strip("/") - if not is_rclone_path(dst_path): - await send_message(message, "Given Wrong RClone Destination!") - return - if dst_path.startswith("mrcc:"): - if config_path != f"tanha/{message.from_user.id}.conf": - await send_message( - message, "You should use same rcl.conf to clone between pathies!" - ) - return - dst_path = dst_path.lstrip("mrcc:") - elif config_path != "rcl.conf": - await send_message( - message, "You should use same rcl.conf to clone between pathies!" - ) - return + try: + self.multi = int(args["-i"]) + except Exception: + self.multi = 0 - remote, src_path = link.split(":", 1) - src_path = src_path.strip("/") + self.upDest = args["-up"] + self.rcFlags = args["-rcf"] + self.link = args["link"] - cmd = [ - "xone", - "lsjson", - "--fast-list", - "--stat", - "--no-modtime", - "--config", - config_path, - f"{remote}:{src_path}", - ] - res = await cmd_exec(cmd) - if res[2] != 0: - if res[2] != -9: - msg = f"Error: While getting RClone Stats. Path: {remote}:{src_path}. 
Stderr: {res[1][:4000]}" - await send_message(message, msg) - return - rstat = loads(res[0]) - if rstat["IsDir"]: - name = src_path.rsplit("/", 1)[-1] if src_path else remote - dst_path += name if dst_path.endswith(":") else f"/{name}" - mime_type = "Folder" - else: - name = src_path.rsplit("/", 1)[-1] - mime_type = rstat["MimeType"] + is_bulk = args["-b"] + sync = args["-sync"] + bulk_start = 0 + bulk_end = 0 - listener = MirrorLeechListener(message, tag=tag) - await listener.on_download_start() + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or 0 + if len(dargs) == 2: + bulk_end = dargs[1] or 0 + is_bulk = True - RCTransfer = RcloneTransferHelper(listener, name) - LOGGER.info( - f"Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}" - ) - gid = token_hex(4) - async with download_dict_lock: - download_dict[message.id] = RcloneStatus(RCTransfer, message, gid, "cl") - await sendStatusMessage(message) - link, destination = await RCTransfer.clone( - config_path, remote, src_path, dst_path, rcf, mime_type - ) - if not link: - return - LOGGER.info(f"Cloning Done: {name}") - cmd1 = [ - "xone", - "lsf", - "--fast-list", - "-R", - "--files-only", - "--config", - config_path, - destination, - ] - cmd2 = [ - "xone", - "lsf", - "--fast-list", - "-R", - "--dirs-only", - "--config", - config_path, - destination, - ] - cmd3 = [ - "xone", - "size", - "--fast-list", - "--json", - "--config", - config_path, - destination, - ] - res1, res2, res3 = await gather(cmd_exec(cmd1), cmd_exec(cmd2), cmd_exec(cmd3)) - if res1[2] != res2[2] != res3[2] != 0: - if res1[2] == -9: + if is_bulk: + await self.initBulk(input_list, bulk_start, bulk_end, Clone) return - files = None - folders = None - size = 0 - LOGGER.error( - f"Error: While getting RClone Stats. Path: {destination}. Stderr: {res1[1][:4000]}" - ) - else: - files = len(res1[0].split("\n")) - folders = len(res2[0].split("\n")) - rsize = loads(res3[0]) - size = rsize["bytes"] - await listener.onUploadComplete( - link, size, files, folders, mime_type, name, destination - ) + await self.getTag(text) -async def gdcloneNode(message, link, listen_up): - if not is_gdrive_link(link) and is_share_link(link): - process_msg = await send_message( - message, f"Processing Link:{link}
" - ) - try: - link = await sync_to_async(direct_link_generator, link) - LOGGER.info(f"Generated link: {link}") - await edit_message( - process_msg, f"Generated Link:{link}
" + if not self.link and (reply_to := self.message.reply_to_message): + self.link = reply_to.text.split("\n", 1)[0].strip() + + self.run_multi(input_list, "", Clone) + + if len(self.link) == 0: + await send_message( + self.message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1] ) - except DirectDownloadLinkError as e: - LOGGER.error(str(e)) - if str(e).startswith("ERROR:"): - await edit_message(process_msg, str(e)) - await delete_links(message) - await one_minute_del(process_msg) - return - await delete_message(process_msg) - if is_gdrive_link(link): - gd = GoogleDriveHelper() - name, mime_type, size, files, _ = await sync_to_async(gd.count, link) - if mime_type is None: - await send_message(message, name) return - if config_dict["STOP_DUPLICATE"]: - LOGGER.info("Checking File/Folder if already in Drive...") - telegraph_content, contents_no = await sync_to_async( - gd.drive_list, name, True, True - ) - if telegraph_content: - msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:" - button = await get_telegraph_list(telegraph_content) - await send_message(message, msg, button) - return - listener = MirrorLeechListener( - message, - tag=listen_up[0], - is_clone=True, - drive_id=listen_up[1], - index_link=listen_up[2], - ) - if limit_exceeded := await limit_checker(size, listener): - await listener.onUploadError(limit_exceeded) + LOGGER.info(self.link) + try: + await self.beforeStart() + except Exception as e: + await send_message(self.message, e) return - await listener.on_download_start() - LOGGER.info(f"Clone Started: Name: {name} - Source: {link}") - drive = GoogleDriveHelper(name, listener=listener) - if files <= 20: - msg = await send_message(message, f"Cloning:{link}
") - link, size, mime_type, files, folders = await sync_to_async( - drive.clone, link, listener.drive_id + await self._proceedToClone(sync) + + async def _proceedToClone(self, sync): + if is_share_link(self.link): + try: + self.link = await sync_to_async(direct_link_generator, self.link) + LOGGER.info(f"Generated link: {self.link}") + except DirectDownloadLinkException as e: + LOGGER.error(str(e)) + if str(e).startswith("ERROR:"): + await send_message(self.message, str(e)) + return + if is_gdrive_link(self.link) or is_gdrive_id(self.link): + self.name, mime_type, self.size, files, _ = await sync_to_async( + gdCount().count, self.link, self.userId ) - await delete_message(msg) - else: - gid = token_hex(4) - async with download_dict_lock: - download_dict[message.id] = GdriveStatus( - drive, size, message, gid, "cl" + if mime_type is None: + await send_message(self.message, self.name) + return + msg, button = await stop_duplicate_check(self) + if msg: + await send_message(self.message, msg, button) + return + await self.on_download_start() + LOGGER.info(f"Clone Started: Name: {self.name} - Source: {self.link}") + drive = gdClone(self) + if files <= 10: + msg = await send_message( + self.message, f"Cloning:{self.link}
" ) - await sendStatusMessage(message) - link, size, mime_type, files, folders = await sync_to_async( - drive.clone, link, listener.drive_id + else: + msg = "" + gid = token_hex(4) + async with task_dict_lock: + task_dict[self.mid] = GdriveStatus(self, drive, gid, "cl") + if self.multi <= 1: + await sendStatusMessage(self.message) + flink, mime_type, files, folders, dir_id = await sync_to_async( + drive.clone ) - if not link: - return - LOGGER.info(f"Cloning Done: {name}") - await listener.onUploadComplete(link, size, files, folders, mime_type, name) - else: - reply_message = await send_message(message, CLONE_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) + if msg: + await delete_message(msg) + if not flink: + return + await self.onUploadComplete( + flink, files, folders, mime_type, dir_id=dir_id + ) + LOGGER.info(f"Cloning Done: {self.name}") + elif is_rclone_path(self.link): + if self.link.startswith("mrcc:"): + self.link = self.link.replace("mrcc:", "", 1) + self.upDest = self.upDest.replace("mrcc:", "", 1) + config_path = f"rclone/{self.userId}.conf" + else: + config_path = "rclone.conf" + remote, src_path = self.link.split(":", 1) + src_path = src_path.strip("/") -@new_task -async def clone(client, message): - await send_react(message) - input_list = message.text.split(" ") - arg_base = { - "link": "", - "-i": "0", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - } - args = arg_parser(input_list[1:], arg_base) - i = args["-i"] - dst_path = args["-up"] - rcf = args["-rcf"] - link = args["link"] - drive_id = args["-id"] - index_link = args["-index"] - multi = int(i) if i.isdigit() else 0 + cmd = [ + "xone", + "lsjson", + "--fast-list", + "--stat", + "--no-modtime", + "--config", + config_path, + f"{remote}:{src_path}", + ] + res = await cmd_exec(cmd) + if res[2] != 0: + if res[2] != -9: + msg = f"Error: While getting rclone stat. Path: {remote}:{src_path}. 
Stderr: {res[1][:4000]}" + await send_message(self.message, msg) + return + rstat = loads(res[0]) + if rstat["IsDir"]: + self.name = src_path.rsplit("/", 1)[-1] if src_path else remote + self.upDest += ( + self.name if self.upDest.endswith(":") else f"/{self.name}" + ) - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - if not link and (reply_to := message.reply_to_message): - link = reply_to.text.split("\n", 1)[0].strip() + mime_type = "Folder" + else: + self.name = src_path.rsplit("/", 1)[-1] + mime_type = rstat["MimeType"] - @new_task - async def __run_multi(): - if multi > 1: - await sleep(5) - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 + await self.on_download_start() + + RCTransfer = RcloneTransferHelper(self) + LOGGER.info( + f"Clone Started: Name: {self.name} - Source: {self.link} - Destination: {self.upDest}" ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id + gid = token_hex(4) + async with task_dict_lock: + task_dict[self.mid] = RcloneStatus(self, RCTransfer, gid, "cl") + if self.multi <= 1: + await sendStatusMessage(self.message) + method = "sync" if sync else "copy" + flink, destination = await RCTransfer.clone( + config_path, + remote, + src_path, + mime_type, + method, + ) + if not destination: + return + LOGGER.info(f"Cloning Done: {self.name}") + cmd1 = [ + "xone", + "lsf", + "--fast-list", + "-R", + "--files-only", + "--config", + config_path, + destination, + ] + cmd2 = [ + "xone", + "lsf", + "--fast-list", + "-R", + "--dirs-only", + "--config", + config_path, + destination, + ] + cmd3 = [ + "xone", + "size", + "--fast-list", + "--json", + "--config", + config_path, + destination, + ] + res1, res2, res3 = await gather( + cmd_exec(cmd1), + cmd_exec(cmd2), + cmd_exec(cmd3), + ) + if res1[2] != res2[2] != res3[2] != 0: + if res1[2] == -9: + return + files = None + folders = None + self.size = 0 + LOGGER.error( + f"Error: While getting rclone stat. Path: {destination}. 
Stderr: {res1[1][:4000]}" + ) + else: + files = len(res1[0].split("\n")) + folders = len(res2[0].strip().split("\n")) if res2[0] else 0 + rsize = loads(res3[0]) + self.size = rsize["bytes"] + await self.onUploadComplete( + flink, files, folders, mime_type, destination + ) + else: + await send_message( + self.message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1] ) - nextmsg.from_user = message.from_user - await sleep(5) - clone(client, nextmsg) - - __run_multi() - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if len(link) == 0: - reply_message = await send_message(message, CLONE_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, __msg in enumerate(error_msg, 1): - final_msg += f"\n{__i}: {__msg}" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None - if is_rclone_path(link): - if not await aiopath.exists("rcl.conf") and not await aiopath.exists( - f"tanha/{message.from_user.id}.conf" - ): - await send_message(message, "Rclone Config Not exists!") - return None - if not config_dict["RCLONE_PATH"] and not dst_path: - await send_message(message, "Destination not specified!") - await delete_links(message) - return None - await rcloneNode(client, message, link, dst_path, rcf, tag) - else: - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id - ): - return await send_message(message, "Google Drive ID validation failed!!") - if not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - await delete_links(message) - return None - await gdcloneNode(message, link, [tag, drive_id, index_link]) - await delete_links(message) - return None +async def clone(client, message): + Clone(client, message).new_event() bot.add_handler( diff --git a/bot/modules/count.py b/bot/modules/count.py deleted file mode 100644 index b04468ed3..000000000 --- a/bot/modules/count.py +++ /dev/null @@ -1,63 +0,0 @@ -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler - -from bot import bot -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - is_gdrive_link, - get_readable_file_size, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, - delete_message, -) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -@new_task -async def countNode(_, message): - async def format_node_count(name, mime_type, size, files, folders, tag): - msg = f"{name}\n\n" - msg += f"• Size: {get_readable_file_size(size)}\n" - if mime_type == "Folder": - msg += f"• SubFolders: {folders}\n" - msg += f"• Files: {files}\n" - msg += f"• Counted by: {tag}\n" - msg += f"• User ID:{message.from_user.id}
\n" - return msg - - args = message.text.split() - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - - link = args[1] if len(args) > 1 else "" - if len(link) == 0 and (reply_to := message.reply_to_message): - link = reply_to.text.split(maxsplit=1)[0].strip() - - if is_gdrive_link(link): - msg = await send_message(message, f"Counting:{link}
") - gd = GoogleDriveHelper() - name, mime_type, size, files, folders = await sync_to_async(gd.count, link) - if mime_type is None: - await send_message(message, name) - await delete_message(msg) - return - msg = await format_node_count(name, mime_type, size, files, folders, tag) - else: - msg = "Send a Google Drive link along with the command or reply to a link with the command." - await send_message(message, msg) - await delete_links(message) - - -bot.add_handler( - MessageHandler( - countNode, - filters=command(BotCommands.CountCommand) & CustomFilters.authorized, - ) -) diff --git a/bot/modules/executor.py b/bot/modules/exec.py similarity index 92% rename from bot/modules/executor.py rename to bot/modules/exec.py index ffa794b94..745bef9e3 100644 --- a/bot/modules/executor.py +++ b/bot/modules/exec.py @@ -46,7 +46,7 @@ async def send_response(msg, message): @new_task -async def evaluate(_, message): +async def evaluate(client, message): content = message.text.split(maxsplit=1) if len(content) == 1: await send_response("No command to execute.", message) @@ -55,7 +55,7 @@ async def evaluate(_, message): @new_task -async def execute(_, message): +async def execute(client, message): content = message.text.split(maxsplit=1) if len(content) == 1: await send_response("No command to execute.", message) @@ -106,21 +106,21 @@ async def execute_code(func, message): bot.add_handler( MessageHandler( - evaluate, filters=command(BotCommands.EvalCommand) & CustomFilters.sudo + evaluate, filters=command(BotCommands.AExecCommand) & CustomFilters.owner ) ) bot.add_handler( MessageHandler( - execute, filters=command(BotCommands.ExecCommand) & CustomFilters.sudo + execute, filters=command(BotCommands.ExecCommand) & CustomFilters.owner ) ) bot.add_handler( EditedMessageHandler( - evaluate, filters=command(BotCommands.EvalCommand) & CustomFilters.sudo + evaluate, filters=command(BotCommands.AExecCommand) & CustomFilters.owner ) ) bot.add_handler( EditedMessageHandler( - execute, filters=command(BotCommands.ExecCommand) & CustomFilters.sudo + execute, filters=command(BotCommands.ExecCommand) & CustomFilters.owner ) ) diff --git a/bot/modules/file_selector.py b/bot/modules/file_selector.py new file mode 100644 index 000000000..0402d5184 --- /dev/null +++ b/bot/modules/file_selector.py @@ -0,0 +1,78 @@ +from contextlib import suppress + +from aiofiles.os import path as aiopath +from aiofiles.os import remove +from pyrogram.filters import regex +from pyrogram.handlers import CallbackQueryHandler + +from bot import ( + LOGGER, + bot, + aria2, + xnox_client, +) +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.status_utils import getTaskByGid +from bot.helper.telegram_helper.message_utils import ( + delete_message, + sendStatusMessage, +) + + +async def get_confirm(_, query): + user_id = query.from_user.id + data = query.data.split() + message = query.message + task = await getTaskByGid(data[2]) + if task is None: + await query.answer("This task has been cancelled!", show_alert=True) + await delete_message(message) + return + if user_id != task.listener.userId: + await query.answer("This task is not for you!", show_alert=True) + elif data[1] == "pin": + await query.answer(data[3], show_alert=True) + elif data[1] == "done": + await query.answer() + id_ = data[3] + if hasattr(task, "seeding"): + if task.listener.isQbit: + tor_info = ( + await sync_to_async(xnox_client.torrents_info, torrent_hash=id_) + )[0] + path = tor_info.content_path.rsplit("/", 1)[0] + res = await 
sync_to_async( + xnox_client.torrents_files, torrent_hash=id_ + ) + for f in res: + if f.priority == 0: + f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"] + for f_path in f_paths: + if await aiopath.exists(f_path): + with suppress(Exception): + await remove(f_path) + if not task.queued: + await sync_to_async( + xnox_client.torrents_resume, torrent_hashes=id_ + ) + else: + res = await sync_to_async(aria2.client.get_files, id_) + for f in res: + if f["selected"] == "false" and await aiopath.exists(f["path"]): + with suppress(Exception): + await remove(f["path"]) + if not task.queued: + try: + await sync_to_async(aria2.client.unpause, id_) + except Exception as e: + LOGGER.error( + f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!" + ) + await sendStatusMessage(message) + await delete_message(message) + else: + await delete_message(message) + await task.cancel_task() + + +bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^sel"))) diff --git a/bot/modules/gd_count.py b/bot/modules/gd_count.py new file mode 100644 index 000000000..89425d6c6 --- /dev/null +++ b/bot/modules/gd_count.py @@ -0,0 +1,54 @@ +from pyrogram.filters import command +from pyrogram.handlers import MessageHandler + +from bot import bot +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.links_utils import is_gdrive_link +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.message_utils import send_message, delete_message +from bot.helper.mirror_leech_utils.gdrive_utils.count import gdCount + + +@new_task +async def countNode(_, message): + args = message.text.split() + user = message.from_user or message.sender_chat + if username := user.username: + tag = f"@{username}" + else: + tag = message.from_user.mention + + link = args[1] if len(args) > 1 else "" + if len(link) == 0 and (reply_to := message.reply_to_message): + link = reply_to.text.split(maxsplit=1)[0].strip() + + if is_gdrive_link(link): + msg = await send_message(message, f"Counting:{link}
") + name, mime_type, size, files, folders = await sync_to_async( + gdCount().count, link, user.id + ) + if mime_type is None: + await send_message(message, name) + return + await delete_message(msg) + msg = f"Name:{name}
" + msg += f"\n\nSize: {get_readable_file_size(size)}" + msg += f"\n\nType: {mime_type}" + if mime_type == "Folder": + msg += f"\nSubFolders: {folders}" + msg += f"\nFiles: {files}" + msg += f"\n\ncc: {tag}" + else: + msg = "Send Gdrive link along with command or by replying to the link by command" + + await send_message(message, msg) + + +bot.add_handler( + MessageHandler( + countNode, + filters=command(BotCommands.CountCommand) & CustomFilters.authorized, + ) +) diff --git a/bot/modules/delete.py b/bot/modules/gd_delete.py similarity index 58% rename from bot/modules/delete.py rename to bot/modules/gd_delete.py index f70909c00..22d449f29 100644 --- a/bot/modules/delete.py +++ b/bot/modules/gd_delete.py @@ -2,45 +2,34 @@ from pyrogram.handlers import MessageHandler from bot import LOGGER, bot -from bot.helper.ext_utils.bot_utils import new_task, sync_to_async, is_gdrive_link +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.links_utils import is_gdrive_link from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.message_utils import ( send_message, - delete_message, - one_minute_del, + auto_delete_message, ) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -async def delete_file(link): - try: - LOGGER.info(link) - drive = GoogleDriveHelper() - return await sync_to_async(drive.deletefile, link) - except Exception as e: - LOGGER.error(f"Error deleting Google Drive file: {e!s}") - return f"An error occurred: {e!s}" +from bot.helper.mirror_leech_utils.gdrive_utils.delete import gdDelete @new_task async def deletefile(_, message): args = message.text.split() + user = message.from_user or message.sender_chat if len(args) > 1: link = args[1] elif reply_to := message.reply_to_message: link = reply_to.text.split(maxsplit=1)[0].strip() else: link = "" - if is_gdrive_link(link): - msg = await delete_file(link) + LOGGER.info(link) + msg = await sync_to_async(gdDelete().deletefile, link, user.id) else: - msg = "Send a Google Drive link along with the command or reply to the link with the command." 
- + msg = "Send Gdrive link along with command or by replying to the link by command" reply_message = await send_message(message, msg) - await delete_message(message) - await one_minute_del(reply_message) + await auto_delete_message(message, reply_message) bot.add_handler( diff --git a/bot/modules/gd_search.py b/bot/modules/gd_search.py new file mode 100644 index 000000000..0d656aa86 --- /dev/null +++ b/bot/modules/gd_search.py @@ -0,0 +1,111 @@ +from pyrogram.filters import regex, command +from pyrogram.handlers import MessageHandler, CallbackQueryHandler + +from bot import LOGGER, bot, user_data +from bot.helper.ext_utils.bot_utils import ( + new_task, + sync_to_async, + get_telegraph_list, +) +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import edit_message, send_message +from bot.helper.mirror_leech_utils.gdrive_utils.search import gdSearch + + +async def list_buttons(user_id, is_recursive=True, user_token=False): + buttons = ButtonMaker() + buttons.callback( + "Folders", f"list_types {user_id} folders {is_recursive} {user_token}" + ) + buttons.callback( + "Files", f"list_types {user_id} files {is_recursive} {user_token}" + ) + buttons.callback( + "Both", f"list_types {user_id} both {is_recursive} {user_token}" + ) + buttons.callback( + f"Recursive: {is_recursive}", + f"list_types {user_id} rec {is_recursive} {user_token}", + ) + buttons.callback( + f"User Token: {user_token}", + f"list_types {user_id} ut {is_recursive} {user_token}", + ) + buttons.callback("Cancel", f"list_types {user_id} cancel") + return buttons.menu(2) + + +async def _list_drive(key, message, item_type, is_recursive, user_token, user_id): + LOGGER.info(f"listing: {key}") + if user_token: + user_dict = user_data.get(user_id, {}) + target_id = user_dict.get("gdrive_id", "") or "" + LOGGER.info(target_id) + else: + target_id = "" + telegraph_content, contents_no = await sync_to_async( + gdSearch(is_recursive=is_recursive, itemType=item_type).drive_list, + key, + target_id, + user_id, + ) + if telegraph_content: + try: + button = await get_telegraph_list(telegraph_content) + except Exception as e: + await edit_message(message, e) + return + msg = f"Found {contents_no} result for {key}" + await edit_message(message, msg, button) + else: + await edit_message(message, f"No result found for {key}") + + +@new_task +async def select_type(_, query): + user_id = query.from_user.id + message = query.message + key = message.reply_to_message.text.split(maxsplit=1)[1].strip() + data = query.data.split() + if user_id != int(data[1]): + return await query.answer(text="Not Yours!", show_alert=True) + if data[2] == "rec": + await query.answer() + is_recursive = not bool(eval(data[3])) + buttons = await list_buttons(user_id, is_recursive, eval(data[4])) + return await edit_message(message, "Choose list options:", buttons) + if data[2] == "ut": + await query.answer() + user_token = not bool(eval(data[4])) + buttons = await list_buttons(user_id, eval(data[3]), user_token) + return await edit_message(message, "Choose list options:", buttons) + if data[2] == "cancel": + await query.answer() + return await edit_message(message, "list has been canceled!") + await query.answer() + item_type = data[2] + is_recursive = eval(data[3]) + user_token = eval(data[4]) + await edit_message(message, f"Searching for {key}") + await _list_drive(key, message, 
item_type, is_recursive, user_token, user_id) + return None + + +async def gdrive_search(_, message): + if len(message.text.split()) == 1: + return await send_message(message, "Send a search key along with command") + user_id = message.from_user.id + buttons = await list_buttons(user_id) + await send_message(message, "Choose list options:", buttons) + return None + + +bot.add_handler( + MessageHandler( + gdrive_search, + filters=command(BotCommands.ListCommand) & CustomFilters.authorized, + ) +) +bot.add_handler(CallbackQueryHandler(select_type, filters=regex("^list_types"))) diff --git a/bot/modules/help.py b/bot/modules/help.py new file mode 100644 index 000000000..7a6e5b62e --- /dev/null +++ b/bot/modules/help.py @@ -0,0 +1,50 @@ +from pyrogram.filters import regex +from pyrogram.handlers import CallbackQueryHandler + +from bot import bot +from bot.helper.ext_utils.bot_utils import COMMAND_USAGE +from bot.helper.ext_utils.help_messages import ( + YT_HELP_DICT, + CLONE_HELP_DICT, + MIRROR_HELP_DICT, +) +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import edit_message, delete_message + + +async def argUsage(_, query): + data = query.data.split() + message = query.message + if data[1] == "close": + await delete_message(message) + elif data[1] == "back": + if data[2] == "m": + await edit_message( + message, COMMAND_USAGE["mirror"][0], COMMAND_USAGE["mirror"][1] + ) + elif data[2] == "y": + await edit_message( + message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1] + ) + elif data[2] == "c": + await edit_message( + message, COMMAND_USAGE["clone"][0], COMMAND_USAGE["clone"][1] + ) + elif data[1] == "mirror": + buttons = ButtonMaker() + buttons.callback("Back", "help back m") + button = buttons.menu() + await edit_message(message, MIRROR_HELP_DICT[data[2]], button) + elif data[1] == "yt": + buttons = ButtonMaker() + buttons.callback("Back", "help back y") + button = buttons.menu() + await edit_message(message, YT_HELP_DICT[data[2]], button) + elif data[1] == "clone": + buttons = ButtonMaker() + buttons.callback("Back", "help back c") + button = buttons.menu() + await edit_message(message, CLONE_HELP_DICT[data[2]], button) + + +bot.add_handler(CallbackQueryHandler(argUsage, filters=regex("^help"))) diff --git a/bot/modules/images.py b/bot/modules/images.py deleted file mode 100644 index b59a155ae..000000000 --- a/bot/modules/images.py +++ /dev/null @@ -1,174 +0,0 @@ -from asyncio import sleep as asleep - -from telegraph import upload_file -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import IMAGES, LOGGER, DATABASE_URL, bot -from bot.helper.ext_utils.bot_utils import new_task, handle_index -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - edit_message, - send_message, - delete_message, -) - - -@new_task -async def picture_add(_, message): - reply = message.reply_to_message - msg = await send_message(message, "Fetching input...") - - if len(message.command) > 1 or (reply and reply.text): - msg_text = reply.text if reply else message.command[1] - if not msg_text.startswith("http"): - return await edit_message( - msg, "This is not a valid link. 
It must start with 'http'."
-            )
-        graph_url = msg_text.strip()
-        await edit_message(msg, f"Adding your link:{graph_url}")
-
-    elif reply and reply.photo:
-        if reply.photo.file_size > 5242880 * 2:
-            return await edit_message(
-                msg, "Media format is not supported. Only photos are allowed."
-            )
-
-        try:
-            photo_dir = await reply.download()
-            await edit_message(
-                msg, "Now, uploading to graph.org
, Please Wait..." - ) - await asleep(1) - graph_url = f"https://graph.org{upload_file(photo_dir)[0]}" - LOGGER.info(f"Telegraph link : {graph_url}") - except Exception as e: - LOGGER.error(f"Images Error: {e!s}") - await edit_message(msg, str(e)) - finally: - await aioremove(photo_dir) - - else: - help_msg = f"Add an image using /{BotCommands.AddImageCommand} followed by IMAGE_LINK, or reply to an image with /{BotCommands.AddImageCommand}." - return await edit_message(msg, help_msg) - - IMAGES.append(graph_url) - - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - - await asleep(1.5) - await edit_message( - msg, - f"Successfully added to the images list!\n\nTotal images: {len(IMAGES)}", - ) - return None - - -async def pictures(_, message): - if not IMAGES: - await send_message( - message, - f"No images to display! Add images using /{BotCommands.AddImageCommand}.", - ) - else: - to_edit = await send_message(message, "Generating a grid of your images...") - buttons = ButtonMaker() - user_id = message.from_user.id - buttons.callback("<<", f"images {user_id} turn -1") - buttons.callback(">>", f"images {user_id} turn 1") - buttons.callback("Remove image", f"images {user_id} remove 0") - buttons.callback("Close", f"images {user_id} close") - buttons.callback("Remove all", f"images {user_id} removeall", "footer") - await delete_message(to_edit) - await send_message( - message, - f"Image No. : 1 / {len(IMAGES)}", - buttons.column(2), - IMAGES[0], - ) - - -@new_task -async def pics_callback(_, query): - message = query.message - user_id = query.from_user.id - data = query.data.split() - - if user_id != int(data[1]): - await query.answer(text="Not authorized user!", show_alert=True) - return - - if data[2] == "turn": - await query.answer() - ind = handle_index(int(data[3]), IMAGES) - no = len(IMAGES) - abs(ind + 1) if ind < 0 else ind + 1 - pic_info = f"Image No. : {no} / {len(IMAGES)}" - buttons = ButtonMaker() - buttons.callback("<<", f"images {data[1]} turn {ind-1}") - buttons.callback(">>", f"images {data[1]} turn {ind+1}") - buttons.callback("Remove Image", f"images {data[1]} remove {ind}") - buttons.callback("Close", f"images {data[1]} close") - buttons.callback("Remove all", f"images {data[1]} removeall", "footer") - await edit_message(message, pic_info, buttons.column(2), IMAGES[ind]) - - elif data[2] == "remove": - IMAGES.pop(int(data[3])) - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - query.answer("Image has been successfully deleted", show_alert=True) - - if len(IMAGES) == 0: - await query.message.delete() - await send_message( - message, - f"No images to display! Add images using /{BotCommands.AddImageCommand}.", - ) - return - - ind = int(data[3]) + 1 - ind = len(IMAGES) - abs(ind) if ind < 0 else ind - pic_info = f"Image No. : {ind+1} / {len(IMAGES)}" - buttons = ButtonMaker() - buttons.callback("<<", f"images {data[1]} turn {ind-1}") - buttons.callback(">>", f"images {data[1]} turn {ind+1}") - buttons.callback("Remove image", f"images {data[1]} remove {ind}") - buttons.callback("Close", f"images {data[1]} close") - buttons.callback("Remove all", f"images {data[1]} removeall", "footer") - await edit_message(message, pic_info, buttons.column(2), IMAGES[ind]) - - elif data[2] == "removeall": - IMAGES.clear() - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - await query.answer( - "All images have been successfully deleted.", show_alert=True - ) - await send_message( - message, - f"No images to display! 
Add images using /{BotCommands.AddImageCommand}.", - ) - await message.delete() - else: - await query.answer() - await message.delete() - await message.reply_to_message.delete() - - -bot.add_handler( - MessageHandler( - picture_add, - filters=command(BotCommands.AddImageCommand) & CustomFilters.authorized, - ) -) -bot.add_handler( - MessageHandler( - pictures, - filters=command(BotCommands.ImagesCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(pics_callback, filters=regex(r"^images"))) diff --git a/bot/modules/list.py b/bot/modules/list.py deleted file mode 100644 index 20cc18576..000000000 --- a/bot/modules/list.py +++ /dev/null @@ -1,111 +0,0 @@ -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import LOGGER, bot -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - checking_access, - get_telegraph_list, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - isAdmin, - delete_links, - edit_message, - send_message, - one_minute_del, - five_minute_del, -) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -async def list_buttons(user_id, isRecursive=True): - buttons = ButtonMaker() - buttons.callback("Folders", f"list_types {user_id} folders {isRecursive}") - buttons.callback("Files", f"list_types {user_id} files {isRecursive}") - buttons.callback("Both", f"list_types {user_id} both {isRecursive}") - buttons.callback( - f"Recursive: {isRecursive}", f"list_types {user_id} rec {isRecursive}" - ) - buttons.callback("Cancel", f"list_types {user_id} cancel") - return buttons.column(2) - - -async def _list_drive(key, message, item_type, isRecursive): - LOGGER.info(f"listing: {key}") - gdrive = GoogleDriveHelper() - telegraph_content, contents_no = await sync_to_async( - gdrive.drive_list, key, isRecursive=isRecursive, itemType=item_type - ) - if telegraph_content: - try: - button = await get_telegraph_list(telegraph_content) - except Exception as e: - await edit_message(message, e) - return - msg = f"Found {contents_no} result for {key}" - await edit_message(message, msg, button) - else: - await edit_message(message, f"No result found for {key}") - - -@new_task -async def select_type(_, query): - user_id = query.from_user.id - message = query.message - key = message.reply_to_message.text.split(maxsplit=1)[1].strip() - data = query.data.split() - if user_id != int(data[1]): - return await query.answer(text="Not Yours!", show_alert=True) - if data[2] == "rec": - await query.answer() - isRecursive = not bool(eval(data[3])) - buttons = await list_buttons(user_id, isRecursive) - return await edit_message(message, "Choose list options:", buttons) - if data[2] == "cancel": - await query.answer() - return await edit_message(message, "List has been canceled!") - await query.answer() - item_type = data[2] - isRecursive = eval(data[3]) - await edit_message(message, f"Searching for {key}...") - await _list_drive(key, message, item_type, isRecursive) - return None - - -@new_task -async def drive_list(_, message): - if len(message.text.split()) == 1: - reply_message = await send_message( - message, "Send a search key along with command" - ) - await delete_links(message) - await one_minute_del(reply_message) - return - user_id = message.from_user.id 
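#  Editor's note: `_list_drive` above awaits the blocking `gdrive.drive_list` call via
#  `sync_to_async`. A minimal sketch of that pattern, assuming the helper simply offloads
#  the call to the event loop's default thread-pool executor (the real implementation
#  lives in bot/helper/ext_utils/bot_utils.py and may differ):
#
#      from asyncio import get_event_loop
#      from functools import partial
#
#      async def sync_to_async(func, *args, wait=True, **kwargs):
#          pfunc = partial(func, *args, **kwargs)
#          future = get_event_loop().run_in_executor(None, pfunc)
#          return await future if wait else future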
- if ( - not await isAdmin(message, user_id) - and message.chat.type != message.chat.type.PRIVATE - ): - msg, btn = await checking_access(user_id) - if msg is not None: - reply_message = await send_message(message, msg, btn.column(1)) - await delete_links(message) - await five_minute_del(reply_message) - return - buttons = await list_buttons(user_id) - reply_message = await send_message(message, "Choose list options:", buttons) - await five_minute_del(reply_message) - await delete_links(message) - - -bot.add_handler( - MessageHandler( - drive_list, - filters=command(BotCommands.ListCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(select_type, filters=regex("^list_types"))) diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py index d4dc87312..e685bcf27 100644 --- a/bot/modules/mediainfo.py +++ b/bot/modules/mediainfo.py @@ -13,12 +13,18 @@ from bot import LOGGER, bot from bot.helper.ext_utils.bot_utils import cmd_exec +from bot.helper.aeon_utils.access_check import token_check from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.aeon_utils.gen_mediainfo import parseinfo from bot.helper.ext_utils.telegraph_helper import telegraph from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import edit_message, send_message - -section_dict = {"General", "Video", "Audio", "Text", "Menu"} +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + delete_links, + edit_message, + send_message, + five_minute_del, +) async def gen_mediainfo(message, link=None, media=None, msg=None): @@ -34,14 +40,12 @@ async def gen_mediainfo(message, link=None, media=None, msg=None): headers = { "user-agent": "Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36" } - async with ( - aiohttp.ClientSession() as session, - session.get(link, headers=headers) as response, - aiopen(des_path, "wb") as f, - ): - async for chunk in response.content.iter_chunked(10000000): - await f.write(chunk) - break + async with aiohttp.ClientSession() as session: + async with session.get(link, headers=headers) as response: + async with aiopen(des_path, "wb") as f: + async for chunk in response.content.iter_chunked(10000000): + await f.write(chunk) + break elif media: des_path = ospath.join(path, media.file_name) if media.file_size <= 50000000: @@ -69,27 +73,16 @@ async def gen_mediainfo(message, link=None, media=None, msg=None): ) -def parseinfo(out): - tc = "" - trigger = False - for line in out.split("\n"): - for section in section_dict: - if line.startswith(section): - trigger = True - if not line.startswith("General"): - tc += "
" - tc += f"{line.replace('Text', 'Subtitle')}
" - break - if trigger: - tc += "" - trigger = False - else: - tc += line + "\n" - tc += "
" - return tc - - async def mediainfo(_, message): + user_id = message.from_user.id + buttons = ButtonMaker() + if message.chat.type != message.chat.type.PRIVATE: + msg, buttons = await token_check(user_id, buttons) + if msg is not None: + reply_message = await send_message(message, msg, buttons.menu(1)) + await delete_links(message) + await five_minute_del(reply_message) + return reply = message.reply_to_message help_msg = ( "By replying to media:" diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py index 8851a5a42..c04411999 100644 --- a/bot/modules/mirror_leech.py +++ b/bot/modules/mirror_leech.py @@ -1,46 +1,39 @@ -import contextlib from re import match as re_match from base64 import b64encode -from asyncio import sleep +from asyncio import create_task from aiofiles.os import path as aiopath from pyrogram.filters import command from pyrogram.handlers import MessageHandler -from bot import LOGGER, bot, user_data, config_dict +from bot import LOGGER, DOWNLOAD_DIR, bot from bot.helper.ext_utils.bot_utils import ( - is_url, + COMMAND_USAGE, new_task, - is_magnet, arg_parser, - is_mega_link, sync_to_async, - fetch_user_tds, + get_content_type, +) +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.links_utils import ( + is_url, + is_magnet, + is_gdrive_id, + is_mega_link, is_gdrive_link, is_rclone_path, - get_content_type, is_telegram_link, ) -from bot.helper.ext_utils.bulk_links import extract_bulk_links -from bot.helper.ext_utils.exceptions import DirectDownloadLinkError -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import MIRROR_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils +from bot.helper.aeon_utils.access_check import error_check +from bot.helper.listeners.task_listener import TaskListener from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.message_utils import ( delete_links, - edit_message, send_message, - delete_message, - one_minute_del, five_minute_del, - get_tg_link_content, + get_tg_link_message, ) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_leech_utils.download_utils.gd_download import add_gd_download from bot.helper.mirror_leech_utils.download_utils.mega_download import ( add_mega_download, @@ -63,385 +56,314 @@ ) -@new_task -async def _mirror_leech( - client, message, is_qbit=False, is_leech=False, same_dir=None, bulk=[] -): - await send_react(message) - user = message.from_user or message.sender_chat - user_id = user.id - user_dict = user_data.get(user_id, {}) - text = message.text.split("\n") - input_list = text[0].split(" ") - arg_base = { - "link": "", - "-t": "", - "-m": "", - "-n": "", - "-h": "", - "-u": "", - "-p": "", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - "-d": False, - "-j": False, - "-s": False, - "-b": False, - "-e": False, - "-z": False, - "-i": "0", - "-ss": "0", - "-atc": "", - } - - args = arg_parser(input_list[1:], arg_base) - attachment = ( - args["-atc"] - or user_dict.get("attachment", "") - or config_dict["ATTACHMENT_URL"] - ) - i = args["-i"] - link = args["link"] - headers = args["-h"] - folder_name = args["-m"] - seed = 
args["-d"] - join = args["-j"] - select = args["-s"] - isBulk = args["-b"] - name = args["-n"] - extract = args["-e"] - compress = args["-z"] - up = args["-up"] - thumb = args["-t"] - rcf = args["-rcf"] - drive_id = args["-id"] - index_link = args["-index"] - ss = args["-ss"] - multi = int(i) if i.isdigit() else 0 - sshots = min(int(ss) if ss.isdigit() else 0, 10) - bulk_start = 0 - bulk_end = 0 - ratio = None - seed_time = None - reply_to = None - file_ = None - session = "" - - if link: - if is_magnet(link) or link.endswith(".torrent"): - is_qbit = True - elif not link and (reply_to := message.reply_to_message) and reply_to.text: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if reply_text and is_magnet(reply_text): - is_qbit = True - if reply_to := message.reply_to_message: - file_ = getattr(reply_to, reply_to.media.value) if reply_to.media else None - if reply_to.document and ( - file_.mime_type == "application/x-bittorrent" - or file_.file_name.endswith(".torrent") - ): - is_qbit = True - if not isinstance(seed, bool): - dargs = seed.split(":") - ratio = dargs[0] or None - if len(dargs) == 2: - seed_time = dargs[1] or None - seed = True - - if not isinstance(isBulk, bool): - dargs = isBulk.split(":") - bulk_start = dargs[0] or None - if len(dargs) == 2: - bulk_end = dargs[1] or None - isBulk = True - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if folder_name and not isBulk: - seed = False +class Mirror(TaskListener): + def __init__( + self, + client, + message, + isQbit=False, + is_leech=False, + same_dir=None, + bulk=None, + multi_tag=None, + options="", + ): + if same_dir is None: + same_dir = {} + if bulk is None: + bulk = [] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = same_dir + self.bulk = bulk + super().__init__() + self.isQbit = isQbit + self.is_leech = is_leech + + @new_task + async def new_event(self): + error_msg, error_button = await error_check(self.message) + if error_msg: + await delete_links(self.message) + error = await send_message(self.message, error_msg, error_button) + await five_minute_del(error) + return None + + text = self.message.text.split("\n") + user_id = self.message.from_user.id + input_list = text[0].split(" ") + + args = { + "-d": False, + "-j": False, + "-s": False, + "-b": False, + "-e": False, + "-z": False, + "-sv": False, + "-ss": False, + "-i": 0, + "link": "", + "-n": "", + "-m": "", + "-up": "", + "-rcf": "", + "-au": "", + "-ap": "", + "-h": "", + "-t": "", + "-ca": "", + "-cv": "", + "-ns": "", + "-md": "", + } + + arg_parser(input_list[1:], args) + + self.select = args["-s"] + self.seed = args["-d"] + self.name = args["-n"] + self.upDest = args["-up"] + self.rcFlags = args["-rcf"] + self.link = args["link"] + self.compress = args["-z"] + self.extract = args["-e"] + self.join = args["-j"] + self.thumb = args["-t"] + self.sampleVideo = args["-sv"] + self.screenShots = args["-ss"] + self.convertAudio = args["-ca"] + self.convertVideo = args["-cv"] + self.nameSub = args["-ns"] + self.metadata = args["-md"] + + headers = args["-h"] + is_bulk = args["-b"] + folder_name = args["-m"] + + bulk_start = 0 + bulk_end = 0 ratio = None seed_time = None - folder_name = f"/{folder_name}" - if same_dir is None: - same_dir = {"total": multi, "tasks": set(), "name": folder_name} - same_dir["tasks"].add(message.id) + reply_to = None + file_ = None + session = "" - if isBulk: try: - bulk = await 
extract_bulk_links(message, bulk_start, bulk_end) - if len(bulk) == 0: - raise ValueError("Bulk Empty!") + self.multi = int(args["-i"]) except Exception: - await send_message( - message, - "Reply to text file or tg message that have links seperated by new line!", - ) - return None - b_msg = input_list[:1] - b_msg.append(f"{bulk[0]} -i {len(bulk)}") - nextmsg = await send_message(message, " ".join(b_msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - nextmsg.from_user = message.from_user - _mirror_leech(client, nextmsg, is_qbit, is_leech, same_dir, bulk) - return None + self.multi = 0 - if len(bulk) != 0: - del bulk[0] + if not isinstance(self.seed, bool): + dargs = self.seed.split(":") + ratio = dargs[0] or None + if len(dargs) == 2: + seed_time = dargs[1] or None + self.seed = True + + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or 0 + if len(dargs) == 2: + bulk_end = dargs[1] or 0 + is_bulk = True + + if not is_bulk: + if folder_name: + self.seed = False + ratio = None + seed_time = None + folder_name = f"/{folder_name}" + if not self.same_dir: + self.same_dir = { + "total": self.multi, + "tasks": set(), + "name": folder_name, + } + self.same_dir["tasks"].add(self.mid) + elif self.same_dir: + self.same_dir["total"] -= 1 - @new_task - async def __run_multi(): - if multi <= 1: - return - await sleep(5) - if len(bulk) != 0: - msg = input_list[:1] - msg.append(f"{bulk[0]} -i {multi - 1}") - nextmsg = await send_message(message, " ".join(msg)) else: - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 - ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - if folder_name: - same_dir["tasks"].add(nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - _mirror_leech(client, nextmsg, is_qbit, is_leech, same_dir, bulk) - - __run_multi() - - path = f"/usr/src/app/downloads/{message.id}{folder_name}" - - if len(text) > 1 and text[1].startswith("Tag: "): - tag, id_ = text[1].split("Tag: ")[1].split() - message.from_user = await client.get_users(id_) - with contextlib.suppress(Exception): - await message.unpin() - elif sender_chat := message.sender_chat: - tag = sender_chat.title - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - if link and is_telegram_link(link): - try: - reply_to, session = await get_tg_link_content(link) - except Exception as e: - await send_message(message, f"ERROR: {e}") - await delete_links(message) + await self.initBulk(input_list, bulk_start, bulk_end, Mirror) return None - elif not link and (reply_to := message.reply_to_message) and reply_to.text: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if reply_text and is_telegram_link(reply_text): + + if len(self.bulk) != 0: + del self.bulk[0] + + self.run_multi(input_list, folder_name, Mirror) + + await self.getTag(text) + + path = f"{DOWNLOAD_DIR}{self.mid}{folder_name}" + + if not self.link and (reply_to := self.message.reply_to_message): + if reply_to.text: + self.link = reply_to.text.split("\n", 1)[0].strip() + if is_telegram_link(self.link): try: - reply_to, session = await get_tg_link_content(reply_text) + reply_to, session = await get_tg_link_message(self.link, user_id) except Exception as e: - 
await send_message(message, f"ERROR: {e}") - await delete_links(message) - return None - - if reply_to: - file_ = getattr(reply_to, reply_to.media.value) if reply_to.media else None - if file_ is None: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if ( - is_url(reply_text) - or is_magnet(reply_text) - or is_rclone_path(reply_text) - ): - link = reply_text - elif reply_to.document and ( - file_.mime_type == "application/x-bittorrent" - or file_.file_name.endswith(".torrent") - ): - link = await reply_to.download() - file_ = None - - if ( - not is_url(link) - and not is_magnet(link) - and not await aiopath.exists(link) - and not is_rclone_path(link) - and file_ is None - ): - reply_message = await send_message(message, MIRROR_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None + x = await send_message(self.message, f"ERROR: {e}") + self.rm_from_sm_dir() + await delete_links(self.message) + return await five_minute_del(x) - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, __msg in enumerate(error_msg, 1): - final_msg += f"\n{__i}: {__msg}" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None + if isinstance(reply_to, list): + self.bulk = reply_to + self.same_dir = {} + b_msg = input_list[:1] + self.options = " ".join(input_list[1:]) + b_msg.append(f"{self.bulk[0]} -i {len(self.bulk)} {self.options}") + nextmsg = await send_message(self.message, " ".join(b_msg)) + nextmsg = await self.client.get_messages( + chat_id=self.message.chat.id, message_ids=nextmsg.id + ) + if self.message.from_user: + nextmsg.from_user = self.user + else: + nextmsg.sender_chat = self.user + Mirror( + self.client, + nextmsg, + self.isQbit, + self.is_leech, + self.same_dir, + self.bulk, + self.multi_tag, + self.options, + ).new_event() + return await delete_links(self.message) - if ( - not is_mega_link(link) - and not is_qbit - and not is_magnet(link) - and not is_rclone_path(link) - and not is_gdrive_link(link) - and not link.endswith(".torrent") - and file_ is None - ): - content_type = await get_content_type(link) - if content_type is None or re_match(r"text/html|text/plain", content_type): - process_msg = await send_message( - message, f"Processing:{link}
" + if reply_to: + file_ = ( + reply_to.document + or reply_to.photo + or reply_to.video + or reply_to.audio + or reply_to.voice + or reply_to.video_note + or reply_to.sticker + or reply_to.animation + or None ) - try: - link = await sync_to_async(direct_link_generator, link) - if isinstance(link, tuple): - link, headers = link - elif isinstance(link, str): - LOGGER.info(f"Generated link: {link}") - except DirectDownloadLinkError as e: - LOGGER.info(str(e)) - if str(e).startswith("ERROR:"): - await edit_message(process_msg, str(e)) - await delete_links(message) - await one_minute_del(process_msg) - return None - await delete_message(process_msg) - - if not is_leech: - if config_dict["DEFAULT_UPLOAD"] == "rc" and not up or up == "rc": - up = config_dict["RCLONE_PATH"] - if not up and config_dict["DEFAULT_UPLOAD"] == "gd": - up = "gd" - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id + + if file_ is None: + if reply_text := reply_to.text: + self.link = reply_text.split("\n", 1)[0].strip() + else: + reply_to = None + elif reply_to.document and ( + file_.mime_type == "application/x-bittorrent" + or file_.file_name.endswith((".torrent", ".dlc")) ): - return await send_message( - message, "Google Drive ID validation failed!!" - ) - if up == "gd" and not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - return None - if not up: - await send_message(message, "No Rclone Destination!") - return None - if up not in ["rcl", "gd"]: - if up.startswith("mrcc:"): - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message( - message, f"Rclone Config: {config_path} not Exists!" 
- ) - return None - if up != "gd" and not is_rclone_path(up): - await send_message(message, "Wrong Rclone Upload Destination!") - await delete_links(message) - return None + self.link = await reply_to.download() + file_ = None - if link == "rcl": - link = await RcloneList(client, message).get_rclone_path("rcd") - if not is_rclone_path(link): - await send_message(message, link) - await delete_links(message) - return None + if self.link and ( + is_magnet(self.link) + or self.link.endswith(".torrent") + or (file_ and file_.file_name.endswith(".torrent")) + ): + self.isQbit = True - if up == "rcl" and not is_leech: - up = await RcloneList(client, message).get_rclone_path("rcu") - if not is_rclone_path(up): - await send_message(message, up) - await delete_links(message) - return None + if ( + not self.link + and file_ is None + or is_telegram_link(self.link) + and reply_to is None + or file_ is None + and not is_url(self.link) + and not is_magnet(self.link) + and not is_mega_link(self.link) + and not await aiopath.exists(self.link) + and not is_rclone_path(self.link) + and not is_gdrive_id(self.link) + and not is_gdrive_link(self.link) + ): + x = await send_message( + self.message, COMMAND_USAGE["mirror"][0], COMMAND_USAGE["mirror"][1] + ) + self.rm_from_sm_dir() + await delete_links(self.message) + return await five_minute_del(x) - listener = MirrorLeechListener( - message, - compress, - extract, - is_qbit, - is_leech, - tag, - select, - seed, - same_dir, - rcf, - up, - join, - drive_id=drive_id, - index_link=index_link, - attachment=attachment, - files_utils={"screenshots": sshots, "thumb": thumb}, - ) + if self.link: + LOGGER.info(self.link) - if file_ is not None: - await delete_links(message) - await TelegramDownloadHelper(listener).add_download( - reply_to, f"{path}/", name, session - ) - elif isinstance(link, dict): - await add_direct_download(link, path, listener, name) - elif is_rclone_path(link): - if link.startswith("mrcc:"): - link = link.split("mrcc:", 1)[1] - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message(message, f"Rclone Config: {config_path} not Exists!") - return None - await add_rclone_download(link, config_path, f"{path}/", name, listener) - elif is_gdrive_link(link): - await delete_links(message) - await add_gd_download(link, path, listener, name) - elif is_mega_link(link): - await delete_links(message) - await add_mega_download(link, f"{path}/", listener, name) - elif is_qbit: - await add_qb_torrent(link, path, listener, ratio, seed_time) - LOGGER.info("Downloading with qbitEngine") - else: - ussr = args["-u"] - pssw = args["-p"] - if ussr or pssw: - auth = f"{ussr}:{pssw}" - headers += ( - f" authorization: Basic {b64encode(auth.encode()).decode('ascii')}" + try: + await self.beforeStart() + except Exception as e: + x = await send_message(self.message, e) + self.rm_from_sm_dir() + await delete_links(self.message) + return await five_minute_del(x) + + if ( + not self.isQbit + and not is_mega_link(self.link) + and not is_magnet(self.link) + and not is_rclone_path(self.link) + and not is_gdrive_link(self.link) + and not self.link.endswith(".torrent") + and file_ is None + and not is_gdrive_id(self.link) + ): + content_type = await get_content_type(self.link) + if content_type is None or re_match( + r"text/html|text/plain", content_type + ): + try: + self.link = await sync_to_async(direct_link_generator, self.link) + if isinstance(self.link, tuple): + self.link, headers = 
self.link + elif isinstance(self.link, str): + LOGGER.info(f"Generated link: {self.link}") + except DirectDownloadLinkException as e: + e = str(e) + if "This link requires a password!" not in e: + LOGGER.info(e) + if e.startswith("ERROR:"): + x = await send_message(self.message, e) + self.rm_from_sm_dir() + await delete_links(self.message) + return await five_minute_del(x) + + if file_ is not None: + create_task( + TelegramDownloadHelper(self).add_download( + reply_to, f"{path}/", session + ) ) - await add_aria2c_download( - link, path, listener, name, headers, ratio, seed_time - ) - await delete_links(message) - return None + elif isinstance(self.link, dict): + create_task(add_direct_download(self, path)) + elif self.isQbit: + create_task(add_qb_torrent(self, path, ratio, seed_time)) + elif is_rclone_path(self.link): + create_task(add_rclone_download(self, f"{path}/")) + elif is_gdrive_link(self.link) or is_gdrive_id(self.link): + create_task(add_gd_download(self, path)) + elif is_mega_link(self.link): + create_task(add_mega_download(self, f"{path}/")) + else: + ussr = args["-au"] + pssw = args["-ap"] + if ussr or pssw: + auth = f"{ussr}:{pssw}" + headers += f" authorization: Basic {b64encode(auth.encode()).decode('ascii')}" + create_task(add_aria2c_download(self, path, headers, ratio, seed_time)) + await delete_links(self.message) + return None async def mirror(client, message): - _mirror_leech(client, message) + Mirror(client, message).new_event() async def leech(client, message): - _mirror_leech(client, message, is_leech=True) + Mirror(client, message, is_leech=True).new_event() bot.add_handler( diff --git a/bot/modules/shell.py b/bot/modules/shell.py index 99957ad1c..149677ccc 100644 --- a/bot/modules/shell.py +++ b/bot/modules/shell.py @@ -38,11 +38,11 @@ async def shell(_, message): bot.add_handler( MessageHandler( - shell, filters=command(BotCommands.ShellCommand) & CustomFilters.sudo + shell, filters=command(BotCommands.ShellCommand) & CustomFilters.owner ) ) bot.add_handler( EditedMessageHandler( - shell, filters=command(BotCommands.ShellCommand) & CustomFilters.sudo + shell, filters=command(BotCommands.ShellCommand) & CustomFilters.owner ) ) diff --git a/bot/modules/status.py b/bot/modules/status.py index ecb5dd256..284899c9c 100644 --- a/bot/modules/status.py +++ b/bot/modules/status.py @@ -5,17 +5,16 @@ from pyrogram.handlers import MessageHandler, CallbackQueryHandler from bot import ( - Interval, + DOWNLOAD_DIR, + Intervals, bot, - download_dict, - bot_start_time, - download_dict_lock, - status_reply_dict_lock, + task_dict, + status_dict, + botStartTime, + task_dict_lock, ) -from bot.helper.ext_utils.bot_utils import ( - SetInterval, - new_task, - turn_page, +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.status_utils import ( get_readable_time, get_readable_file_size, ) @@ -24,45 +23,54 @@ from bot.helper.telegram_helper.message_utils import ( send_message, delete_message, - one_minute_del, sendStatusMessage, - update_all_messages, + auto_delete_message, + update_status_message, ) @new_task async def mirror_status(_, message): - async with download_dict_lock: - count = len(download_dict) - + async with task_dict_lock: + count = len(task_dict) if count == 0: - current_time = get_readable_time(time() - bot_start_time) - free = get_readable_file_size(disk_usage("/usr/src/app/downloads/").free) + currentTime = get_readable_time(time() - botStartTime) + free = get_readable_file_size(disk_usage(DOWNLOAD_DIR).free) msg = "No downloads are currently 
in progress.\n" - msg += f"\n• Bot uptime: {current_time}" - msg += f"\n• Free disk space: {free}" - + msg += f"\nBot uptime: {currentTime}" + msg += f"\nFree disk space: {free}" reply_message = await send_message(message, msg) - await delete_message(message) - await one_minute_del(reply_message) + await auto_delete_message(message, reply_message) else: - await sendStatusMessage(message) + text = message.text.split() + if len(text) > 1: + user_id = message.from_user.id if text[1] == "me" else int(text[1]) + else: + user_id = 0 + sid = message.chat.id + if obj := Intervals["status"].get(sid): + obj.cancel() + del Intervals["status"][sid] + await sendStatusMessage(message, user_id) await delete_message(message) - async with status_reply_dict_lock: - if Interval: - Interval[0].cancel() - Interval.clear() - Interval.append(SetInterval(1, update_all_messages)) @new_task async def status_pages(_, query): - await query.answer() data = query.data.split() - if data[1] == "ref": - await update_all_messages(True) - else: - await turn_page(data) + key = int(data[1]) + if data[2] in ["nex", "pre"]: + await query.answer() + async with task_dict_lock: + if data[2] == "nex": + status_dict[key]["page_no"] += status_dict[key]["page_step"] + else: + status_dict[key]["page_no"] -= status_dict[key]["page_step"] + elif data[2] == "st": + await query.answer() + async with task_dict_lock: + status_dict[key]["status"] = data[3] + await update_status_message(key, force=True) bot.add_handler( diff --git a/bot/modules/torrent_search.py b/bot/modules/torrent_search.py index 7e7d8017d..c8f081f1e 100644 --- a/bot/modules/torrent_search.py +++ b/bot/modules/torrent_search.py @@ -1,25 +1,18 @@ -import contextlib from html import escape from urllib.parse import quote -from aiohttp import ClientSession from pyrogram.filters import regex, command from pyrogram.handlers import MessageHandler, CallbackQueryHandler -from bot import LOGGER, bot, config_dict, xnox_client -from bot.helper.ext_utils.bot_utils import ( - new_task, - new_thread, - sync_to_async, - checking_access, - get_readable_file_size, -) +from bot import LOGGER, bot, xnox_client +from bot.helper.ext_utils.bot_utils import new_task, new_thread, sync_to_async +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.aeon_utils.access_check import token_check from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.ext_utils.telegraph_helper import telegraph from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( - isAdmin, delete_links, edit_message, send_message, @@ -42,7 +35,6 @@ "uniondht", "yts", ] -SITES = None TELEGRAPH_LIMIT = 300 src_plugins = { "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py", @@ -68,156 +60,52 @@ async def initiate_search_tools(): await sync_to_async(xnox_client.search_uninstall_plugin, names=names) await sync_to_async(xnox_client.search_install_plugin, src_plugins) - if SEARCH_API_LINK := config_dict["SEARCH_API_LINK"]: - global SITES # noqa: PLW0603 - try: - async with ( - ClientSession(trust_env=True) as c, - c.get(f"{SEARCH_API_LINK}/api/v1/sites") as res, - ): - data = await res.json() - SITES = { - str(site): str(site).capitalize() for site in data["supported_sites"] - } - SITES["all"] = "All" - except Exception as e: - LOGGER.error( - f"{e} Can't fetching sites from SEARCH_API_LINK make sure use latest version 
of API" - ) - SITES = None - async def __search(key, site, message, method): - if method.startswith("api"): - SEARCH_API_LINK = config_dict["SEARCH_API_LINK"] - SEARCH_LIMIT = config_dict["SEARCH_LIMIT"] - if method == "apisearch": - LOGGER.info(f"API Searching: {key} from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={SEARCH_LIMIT}" - elif method == "apitrend": - LOGGER.info(f"API Trending from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/trending?limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/trending?site={site}&limit={SEARCH_LIMIT}" - elif method == "apirecent": - LOGGER.info(f"API Recent from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/recent?limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}" - try: - async with ClientSession(trust_env=True) as c, c.get(api) as res: - search_results = await res.json() - if "error" in search_results or search_results["total"] == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", - ) - return - msg = f"Found {min(search_results['total'], TELEGRAPH_LIMIT)}" - if method == "apitrend": - msg += ( - f" trending result(s)\nTorrent Site:- {SITES.get(site)}" - ) - elif method == "apirecent": - msg += f" recent result(s)\nTorrent Site:- {SITES.get(site)}" - else: - msg += ( - f" result(s) for {key}\nTorrent Site:- {SITES.get(site)}" - ) - search_results = search_results["data"] - except Exception as e: - await edit_message(message, str(e)) - return - else: - LOGGER.info(f"PLUGINS Searching: {key} from {site}") - search = await sync_to_async( - xnox_client.search_start, pattern=key, plugins=site, category="all" + LOGGER.info(f"PLUGINS Searching: {key} from {site}") + search = await sync_to_async( + xnox_client.search_start, pattern=key, plugins=site, category="all" + ) + search_id = search.id + while True: + result_status = await sync_to_async( + xnox_client.search_status, search_id=search_id ) - search_id = search.id - while True: - result_status = await sync_to_async( - xnox_client.search_status, search_id=search_id - ) - status = result_status[0].status - if status != "Running": - break - dict_search_results = await sync_to_async( - xnox_client.search_results, search_id=search_id, limit=TELEGRAPH_LIMIT + status = result_status[0].status + if status != "Running": + break + dict_search_results = await sync_to_async( + xnox_client.search_results, search_id=search_id, limit=TELEGRAPH_LIMIT + ) + search_results = dict_search_results.results + total_results = dict_search_results.total + if total_results == 0: + await edit_message( + message, f"No result found for {key}\nTorrent Site:- {site.capitalize()}" ) - search_results = dict_search_results.results - total_results = dict_search_results.total - if total_results == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {site.capitalize()}", - ) - return - msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" - msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" - await sync_to_async(xnox_client.search_delete, search_id=search_id) - link = await __getResult(search_results, key, message, method) + return + msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" + msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" + await 
sync_to_async(xnox_client.search_delete, search_id=search_id)
+    link = await __getResult(search_results, key, message)
     buttons = ButtonMaker()
     buttons.url("View", link)
-    button = buttons.column(1)
+    button = buttons.menu(1)
     await edit_message(message, msg, button)
 
 
-async def __getResult(search_results, key, message, method):
+async def __getResult(search_results, key, message):
     telegraph_content = []
-    if method == "apirecent":
-        msg = "<h4>API Recent Results</h4><br>"
-    elif method == "apisearch":
-        msg = f"<h4>API Search Result(s) For {key}</h4><br>"
-    elif method == "apitrend":
-        msg = "<h4>API Trending Results</h4><br>"
-    else:
-        msg = f"<h4>PLUGINS Search Result(s) For {key}</h4><br>"
+    msg = f"<h4>PLUGINS Search Result(s) For {key}</h4><br>"
     for index, result in enumerate(search_results, start=1):
-        if method.startswith("api"):
-            try:
-                if "name" in result:
-                    msg += f"<code>{escape(result['name'])}</code><br>"
-                if "torrents" in result:
-                    for subres in result["torrents"]:
-                        msg += f"<b>Quality: </b>{subres['quality']} | <b>Type: </b>{subres['type']} | "
-                        msg += f"<b>Size: </b>{subres['size']}<br>"
-                        if "torrent" in subres:
-                            msg += (
-                                f"<a href='{subres['torrent']}'>Direct Link</a><br>"
-                            )
-                        elif "magnet" in subres:
-                            msg += "<b>Share Magnet to</b> "
-                            msg += f"<a href='http://t.me/share/url?url={quote(subres['magnet'])}'>Telegram</a><br>"
-                    msg += "<br>"
-                else:
-                    msg += f"<b>Size: </b>{result['size']}<br>"
-                    with contextlib.suppress(Exception):
-                        msg += f"<b>Seeders: </b>{result['seeders']} | <b>Leechers: </b>{result['leechers']}<br>"
-                    if "torrent" in result:
-                        msg += (
-                            f"<a href='{result['torrent']}'>Direct Link</a><br><br>"
-                        )
-                    elif "magnet" in result:
-                        msg += "<b>Share Magnet to</b> "
-                        msg += f"<a href='http://t.me/share/url?url={quote(result['magnet'])}'>Telegram</a><br><br>"
-                    else:
-                        msg += "<br>"
-            except Exception:
-                continue
+        msg += f"<code>{escape(result.fileName)}</code><br>"
+        msg += f"<b>Size: </b>{get_readable_file_size(result.fileSize)}<br>"
+        msg += f"<b>Seeders: </b>{result.nbSeeders} | <b>Leechers: </b>{result.nbLeechers}<br>"
+        link = result.fileUrl
+        if link.startswith("magnet:"):
+            msg += f"<b>Share Magnet to</b> <a href='http://t.me/share/url?url={quote(link)}'>Telegram</a><br><br>"
         else:
-            msg += f"<code>{escape(result.fileName)}</code><br>"
-            msg += f"<b>Size: </b>{get_readable_file_size(result.fileSize)}<br>"
-            msg += f"<b>Seeders: </b>{result.nbSeeders} | <b>Leechers: </b>{result.nbLeechers}<br>"
-            link = result.fileUrl
-            if link.startswith("magnet:"):
-                msg += f"<b>Share Magnet to</b> <a href='http://t.me/share/url?url={quote(link)}'>Telegram</a><br><br>"
-            else:
-                msg += f"<a href='{link}'>Direct Link</a><br><br>"
+            msg += f"<a href='{link}'>Direct Link</a><br><br>
" if len(msg.encode("utf-8")) > 39000: telegraph_content.append(msg) @@ -247,14 +135,6 @@ async def __getResult(search_results, key, message, method): return f"https://telegra.ph/{path[0]}" -def __api_buttons(user_id, method): - buttons = ButtonMaker() - for data, name in SITES.items(): - buttons.callback(name, f"torser {user_id} {data} {method}") - buttons.callback("Cancel", f"torser {user_id} cancel") - return buttons.column(2) - - async def __plugin_buttons(user_id): buttons = ButtonMaker() for siteName in PLUGINS: @@ -263,7 +143,7 @@ async def __plugin_buttons(user_id): ) buttons.callback("All", f"torser {user_id} all plugin") buttons.callback("Cancel", f"torser {user_id} cancel") - return buttons.column(2) + return buttons.menu(2) @new_thread @@ -271,42 +151,24 @@ async def torrentSearch(_, message): user_id = message.from_user.id buttons = ButtonMaker() key = message.text.split() - if ( - not await isAdmin(message, user_id) - and message.chat.type != message.chat.type.PRIVATE - ): - msg, buttons = await checking_access(user_id, buttons) + if message.chat.type != message.chat.type.PRIVATE: + msg, buttons = await token_check(user_id, buttons) if msg is not None: - reply_message = await send_message(message, msg, buttons.column(1)) + reply_message = await send_message(message, msg, buttons.menu(1)) await delete_links(message) await five_minute_del(reply_message) return - if len(key) == 1 and SITES is None: + if len(key) == 1: reply_message = await send_message( message, "Send a search key along with command" ) await one_minute_del(reply_message) await delete_links(message) return - if len(key) == 1: - buttons.callback("Trending", f"torser {user_id} apitrend") - buttons.callback("Recent", f"torser {user_id} apirecent") - buttons.callback("Cancel", f"torser {user_id} cancel") - button = buttons.column(2) - reply_message = await send_message( - message, "Send a search key along with command", button - ) - elif SITES is not None: - buttons.callback("Api", f"torser {user_id} apisearch") - buttons.callback("Plugins", f"torser {user_id} plugin") - buttons.callback("Cancel", f"torser {user_id} cancel") - button = buttons.column(2) - reply_message = await send_message(message, "Choose tool to search:", button) - else: - button = await __plugin_buttons(user_id) - reply_message = await send_message( - message, "Choose site to search | Plugins:", button - ) + button = await __plugin_buttons(user_id) + reply_message = await send_message( + message, "Choose site to search | Plugins:", button + ) await five_minute_del(reply_message) await delete_links(message) @@ -320,10 +182,6 @@ async def torrentSearchUpdate(_, query): data = query.data.split() if user_id != int(data[1]): await query.answer("Not Yours!", show_alert=True) - elif data[2].startswith("api"): - await query.answer() - button = __api_buttons(user_id, data[2]) - await edit_message(message, "Choose site:", button) elif data[2] == "plugin": await query.answer() button = await __plugin_buttons(user_id) @@ -331,28 +189,11 @@ async def torrentSearchUpdate(_, query): elif data[2] != "cancel": await query.answer() site = data[2] - method = data[3] - if method.startswith("api"): - if key is None: - if method == "apirecent": - endpoint = "Recent" - elif method == "apitrend": - endpoint = "Trending" - await edit_message( - message, - f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}", - ) - else: - await edit_message( - message, - f"Searching for {key}\nTorrent Site:- {SITES.get(site)}", - ) - else: - await edit_message( - message, - 
f"Searching for {key}\nTorrent Site:- {site.capitalize()}", - ) - await __search(key, site, message, method) + await edit_message( + message, + f"Searching for {key}\nTorrent Site:- {site.capitalize()}", + ) + await __search(key, site, message, "plugin") else: await query.answer() await edit_message(message, "Search has been canceled!") diff --git a/bot/modules/torrent_select.py b/bot/modules/torrent_select.py deleted file mode 100644 index 18818ff86..000000000 --- a/bot/modules/torrent_select.py +++ /dev/null @@ -1,98 +0,0 @@ -import contextlib - -from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex -from pyrogram.handlers import CallbackQueryHandler - -from bot import LOGGER, bot, aria2, xnox_client -from bot.helper.ext_utils.bot_utils import sync_to_async, get_task_by_gid -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.message_utils import sendStatusMessage - - -async def handle_query(client, query): - user_id = query.from_user.id - data = query.data.split() - message = query.message - download = await get_task_by_gid(data[2]) - - if not download: - await query.answer("This task has been cancelled!", show_alert=True) - await message.delete() - return - - listener = getattr(download, "listener", None) - if not listener: - await query.answer( - "Not in download state anymore! Keep this message to resume the seed if seed enabled!", - show_alert=True, - ) - return - - if ( - user_id != listener().message.from_user.id - and not await CustomFilters.sudo_user(client, query) - ): - await query.answer("This task is not for you!", show_alert=True) - return - - action = data[1] - if action == "pin": - await query.answer(data[3], show_alert=True) - elif action == "done": - await handle_done_action(data[3], download, message, query) - elif action == "rm": - await download.download().cancel_download() - await message.delete() - - -async def handle_done_action(id_, download, message, query): - await query.answer() - - if len(id_) > 20: - await handle_torrent_done(id_, download) - else: - await handle_aria2_done(id_, download) - - await sendStatusMessage(message) - await message.delete() - - -async def handle_torrent_done(torrent_hash, download): - client = xnox_client - torrent_info = ( - await sync_to_async(client.torrents_info, torrent_hash=torrent_hash) - )[0] - path = torrent_info.content_path.rsplit("/", 1)[0] - files = await sync_to_async(client.torrents_files, torrent_hash=torrent_hash) - - for file in files: - if file.priority == 0: - for file_path in [f"{path}/{file.name}", f"{path}/{file.name}.!qB"]: - if await aiopath.exists(file_path): - with contextlib.suppress(Exception): - await aioremove(file_path) - - if not download.queued: - await sync_to_async(client.torrents_resume, torrent_hashes=torrent_hash) - - -async def handle_aria2_done(gid, download): - files = await sync_to_async(aria2.client.get_files, gid) - - for file in files: - if file["selected"] == "false" and await aiopath.exists(file["path"]): - with contextlib.suppress(Exception): - await aioremove(file["path"]) - - if not download.queued: - try: - await sync_to_async(aria2.client.unpause, gid) - except Exception as e: - LOGGER.error( - f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!" 
- ) - - -bot.add_handler(CallbackQueryHandler(handle_query, filters=regex("^btsel"))) diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py index 30cbf1e01..b1f277697 100644 --- a/bot/modules/users_settings.py +++ b/bot/modules/users_settings.py @@ -1,27 +1,25 @@ from io import BytesIO -from os import path as ospath from os import getcwd from html import escape from time import time from asyncio import sleep from functools import partial -from PIL import Image from aiofiles.os import path as aiopath -from aiofiles.os import mkdir -from aiofiles.os import remove as aioremove +from aiofiles.os import remove, makedirs from pyrogram.filters import regex, create, command from pyrogram.handlers import MessageHandler, CallbackQueryHandler -from bot import DATABASE_URL, IS_PREMIUM_USER, bot, user_data, config_dict -from bot.helper.ext_utils.bot_utils import ( - new_thread, - sync_to_async, - is_gdrive_link, - update_user_ldata, +from bot import ( + MAX_SPLIT_SIZE, + GLOBAL_EXTENSION_FILTER, + bot, + user_data, + config_dict, ) -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.ext_utils.help_strings import uset_display_dict +from bot.helper.ext_utils.bot_utils import new_thread, update_user_ldata +from bot.helper.ext_utils.db_handler import Database +from bot.helper.ext_utils.media_utils import createThumb from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_build import ButtonMaker @@ -29,345 +27,187 @@ sendFile, edit_message, send_message, - sendCustomMsg, delete_message, five_minute_del, ) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper handler_dict = {} -fname_dict = { - "rcc": "RClone", - "prefix": "Prefix", - "suffix": "Suffix", - "remname": "Remname", - "ldump": "Dump", - "user_tds": "User Custom TDs", - "lcaption": "Caption", - "thumb": "Thumbnail", - "metadata": "Metadata", - "attachment": "Attachment", - "yt_opt": "YT-DLP Options", -} - - -async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None): + + +async def get_user_settings(from_user): user_id = from_user.id - name = from_user.mention(style="html") buttons = ButtonMaker() - thumbpath = f"Thumbnails/{user_id}.jpg" - rclone_path = f"tanha/{user_id}.conf" - user_dict = user_data.get(user_id, {}) - if key is None: - buttons.callback("Universal", f"userset {user_id} universal") - buttons.callback("Mirror", f"userset {user_id} mirror") - buttons.callback("Leech", f"userset {user_id} leech") - if user_dict and any( - key in user_dict - for key in [ - "prefix", - "suffix", - "remname", - "ldump", - "yt_opt", - "media_group", - "rclone", - "thumb", - "as_doc", - "metadata", - "attachment", - ] - ): - buttons.callback("Reset", f"userset {user_id} reset_all") - buttons.callback("Close", f"userset {user_id} close") - text = f"User Settings for {name}" - button = buttons.column(2) - elif key == "universal": - buttons.callback("YT-DLP Options", f"userset {user_id} yt_opt") - ytopt = ( - "Not Exists" - if ( - val := user_dict.get("yt_opt", config_dict.get("YT_DLP_OPTIONS", "")) - ) - == "" - else val - ) - buttons.callback("Prefix", f"userset {user_id} prefix") - prefix = user_dict.get("prefix", "Not Exists") - - buttons.callback("Suffix", f"userset {user_id} suffix") - suffix = user_dict.get("suffix", "Not Exists") - - buttons.callback("Remname", f"userset {user_id} remname") - remname = user_dict.get("remname", "Not Exists") - - 
buttons.callback("Metadata", f"userset {user_id} metadata") - metadata = user_dict.get("metadata", "Not Exists") - - buttons.callback("Attachment", f"userset {user_id} attachment") - attachment = user_dict.get("attachment", "Not Exists") - - text = f"Universal Settings for {name}\n\n" - text += f"• YT-DLP Options:{ytopt}
\n" - text += f"• Prefix:{prefix}
\n" - text += f"• Suffix:{suffix}
\n" - text += f"• Metadata:{metadata}
\n" - text += f"• Attachment:{attachment}
\n" - text += f"• Remname:{remname}
" - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif key == "mirror": - buttons.callback("RClone", f"userset {user_id} rcc") - rccmsg = "Exists" if await aiopath.exists(rclone_path) else "Not Exists" - tds_mode = "Enabled" if user_dict.get("td_mode") else "Disabled" - buttons.callback("User TDs", f"userset {user_id} user_tds") - - text = f"Mirror Settings for {name}\n\n" - text += f"• Rclone Config: {rccmsg}\n" - text += f"• User TD Mode: {tds_mode}" - - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif key == "leech": - if ( - user_dict.get("as_doc", False) - or "as_doc" not in user_dict - and config_dict["AS_DOCUMENT"] - ): - ltype = "DOCUMENT" - buttons.callback("Send As Media", f"userset {user_id} doc") - else: - ltype = "MEDIA" - buttons.callback("Send As Document", f"userset {user_id} doc") - mediainfo = ( - "Enabled" - if user_dict.get("mediainfo", config_dict["SHOW_MEDIAINFO"]) - else "Disabled" - ) - buttons.callback( - "Disable MediaInfo" if mediainfo == "Enabled" else "Enable MediaInfo", - f"userset {user_id} mediainfo", - ) - if config_dict["SHOW_MEDIAINFO"]: - mediainfo = "Force Enabled" - buttons.callback("Thumbnail", f"userset {user_id} thumb") - thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists" + paths = { + "thumbpath": f"Thumbnails/{user_id}.jpg", + "rclone_conf": f"rclone/{user_id}.conf", + "token_pickle": f"tokens/{user_id}.pickle", + } - if user_dict.get("media_group", False) or ( - "media_group" not in user_dict and config_dict["MEDIA_GROUP"] - ): - buttons.callback("Disable Media Group", f"userset {user_id} mgroup") - else: - buttons.callback("Enable Media Group", f"userset {user_id} mgroup") - media_group = ( - "Enabled" - if user_dict.get("media_group", config_dict.get("MEDIA_GROUP")) - else "Disabled" + user_dict = user_data.get(user_id, {}) + settings = { + "rccmsg": "Exists" + if await aiopath.exists(paths["rclone_conf"]) + else "Not Exists", + "tokenmsg": "Exists" + if await aiopath.exists(paths["token_pickle"]) + else "Not Exists", + "default_upload": "Gdrive API" + if (user_dict.get("default_upload", config_dict["DEFAULT_UPLOAD"])) == "gd" + else "Rclone", + "ex_ex": user_dict.get( + "excluded_extensions", + GLOBAL_EXTENSION_FILTER + if "excluded_extensions" not in user_dict + else "None", ) - - buttons.callback("Leech Caption", f"userset {user_id} lcaption") - lcaption = user_dict.get("lcaption", "Not Exists") - - buttons.callback("Leech Dump", f"userset {user_id} ldump") - ldump = "Not Exists" if (val := user_dict.get("ldump", "")) == "" else val - - SPLIT_SIZE = "4GB" if IS_PREMIUM_USER else "2GB" - text = f"Leech Settings for {name}\n\n" - text += f"• Leech split size: {SPLIT_SIZE}\n" - text += f"• Leech Type: {ltype}\n" - text += f"• Custom Thumbnail: {thumbmsg}\n" - text += f"• Media Group: {media_group}\n" - text += f"• Leech Caption:{escape(lcaption)}
\n" - text += f"• Leech Dump:{ldump}
\n" - text += f"• MediaInfo Mode:{mediainfo}
" - - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif edit_type: - text = f"{fname_dict[key]} Settings :\n\n" - if key == "rcc": - set_exist = await aiopath.exists(rclone_path) - text += f"rcl.conf File : {'' if set_exist else 'Not'} Exists\n\n" - elif key == "thumb": - set_exist = await aiopath.exists(thumbpath) - text += ( - f"Custom Thumbnail : {'' if set_exist else 'Not'} Exists\n\n" - ) - elif key == "yt_opt": - set_exist = ( - "Not Exists" - if ( - val := user_dict.get( - "yt_opt", config_dict.get("YT_DLP_OPTIONS", "") - ) - ) - == "" - else val - ) - text += f"YT-DLP Options :{escape(set_exist)}
\n\n" - elif key in [ - "prefix", - "remname", - "suffix", - "lcaption", - "ldump", - "metadata", - "attachment", - ]: - set_exist = ( - "Not Exists" if (val := user_dict.get(key, "")) == "" else val - ) - text += f"{fname_dict[key]}: {set_exist}\n\n" - elif key == "user_tds": - set_exist = ( - len(val) if (val := user_dict.get(key, False)) else "Not Exists" - ) - tds_mode = "Enabled" if user_dict.get("td_mode") else "Disabled" - buttons.callback( - "Disable UserTDs" if tds_mode == "Enabled" else "Enable UserTDs", - f"userset {user_id} td_mode", - "header", - ) - text += f"User TD Mode: {tds_mode}\n" - else: - return None - text += f"Description : {uset_display_dict[key][0]}" - if edit_mode: - text += "\n\n" + uset_display_dict[key][1] - buttons.callback("Stop", f"userset {user_id} {key}") - elif key != "user_tds" or set_exist == "Not Exists": - buttons.callback( - "Change" if set_exist and set_exist != "Not Exists" else "Set", - f"userset {user_id} {key} edit", - ) - if set_exist and set_exist != "Not Exists": - if key == "user_tds": - buttons.callback("Show", f"userset {user_id} show_tds", "header") - buttons.callback("Delete", f"userset {user_id} d{key}") - buttons.callback("Back", f"userset {user_id} back {edit_type}", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - return text, button - - -async def update_user_settings( - query, key=None, edit_type=None, edit_mode=None, msg=None -): - msg, button = await get_user_settings(query.from_user, key, edit_type, edit_mode) + or "None", + "meta_msg": user_dict.get("metadata", "None") or "None", + "ns_msg": "Added" if user_dict.get("name_sub", False) else "None", + "ytopt": user_dict.get("yt_opt", config_dict.get("YT_DLP_OPTIONS", "None")) + or "None", + } + + button_labels = [ + ("Leech", f"userset {user_id} leech"), + ("Rclone", f"userset {user_id} rclone"), + ("Gdrive Tools", f"userset {user_id} gdrive"), + ("Excluded Extensions", f"userset {user_id} ex_ex"), + ("Metadata key", f"userset {user_id} metadata_key"), + ("Name Substitute", f"userset {user_id} name_substitute"), + ("YT-DLP Options", f"userset {user_id} yto"), + ("Reset All", f"userset {user_id} reset") if user_dict else None, + ("Close", f"userset {user_id} close"), + ] + + """ ( + f"Upload using {('Gdrive API' if settings['default_upload'] != 'Gdrive API' else 'Rclone')}", + f"userset {user_id} {settings['default_upload']}", + ), + """ # TODO **Default Upload:** {settings['default_upload']} + + for label, callback in filter(None, button_labels): + buttons.callback(label, callback) + + text = f""">Settings + +**Rclone Config:** {settings['rccmsg']} +**Gdrive Token:** {settings['tokenmsg']} +**Name Substitution:** `{settings['ns_msg']}` +**Metadata Title:** `{settings['meta_msg']}` +**Excluded extension:** `{settings['ex_ex']}` +**YT-DLP Options:** `{escape(settings['ytopt'])}` +""" + + return text, buttons.menu(2) + + +async def update_user_settings(query): + msg, button = await get_user_settings(query.from_user) user_id = query.from_user.id thumbnail = f"Thumbnails/{user_id}.jpg" - if not ospath.exists(thumbnail): + if not await aiopath.exists(thumbnail): thumbnail = "https://graph.org/file/73ae908d18c6b38038071.jpg" - await edit_message(query.message, msg, button, thumbnail) + await edit_message(query.message, msg, button, photo=thumbnail, MARKDOWN=True) @new_thread async def user_settings(_, message): - msg, button = await get_user_settings(message.from_user) - user_id = message.from_user.id + from_user = 
message.from_user + handler_dict[from_user.id] = False + msg, button = await get_user_settings(from_user) + user_id = from_user.id thumbnail = f"Thumbnails/{user_id}.jpg" - if not ospath.exists(thumbnail): + if not await aiopath.exists(thumbnail): thumbnail = "https://graph.org/file/73ae908d18c6b38038071.jpg" - x = await send_message(message, msg, button, thumbnail) + x = await send_message(message, msg, button, photo=thumbnail, MARKDOWN=True) await five_minute_del(message) await delete_message(x) -async def set_yt_options(_, message, pre_event): +async def set_thumb(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - value = message.text - update_user_ldata(user_id, "yt_opt", value) - await message.delete() - await update_user_settings(pre_event, "yt_opt", "universal") - if DATABASE_URL: - await DbManager().update_user_data(user_id) + des_dir = await createThumb(message, user_id) + update_user_ldata(user_id, "thumb", des_dir) + await delete_message(message) + await update_user_settings(pre_event) + await Database().update_user_doc(user_id, "thumb", des_dir) -async def set_custom(_, message, pre_event, key): - user_id = message.from_user.id - handler_dict[user_id] = False - value = message.text - return_key = "leech" - n_key = key - user_dict = user_data.get(user_id, {}) - if key == "user_tds": - user_tds = user_dict.get(key, {}) - for td_item in value.split("\n"): - if td_item == "": - continue - split_ck = td_item.split() - td_details = td_item.rsplit( - maxsplit=( - 2 - if split_ck[-1].startswith("http") - and not is_gdrive_link(split_ck[-1]) - else 1 - if len(split_ck[-1]) > 15 - else 0 - ) - ) - for title in list(user_tds.keys()): - if td_details[0].casefold() == title.casefold(): - del user_tds[title] - if len(td_details) > 1: - if is_gdrive_link(td_details[1].strip()): - td_details[1] = GoogleDriveHelper.getIdFromUrl(td_details[1]) - if await sync_to_async( - GoogleDriveHelper().getFolderData, td_details[1] - ): - user_tds[td_details[0]] = { - "drive_id": td_details[1], - "index_link": td_details[2].rstrip("/") - if len(td_details) > 2 - else "", - } - value = user_tds - return_key = "mirror" - update_user_ldata(user_id, n_key, value) - await message.delete() - await update_user_settings(pre_event, key, return_key, msg=message) - if DATABASE_URL: - await DbManager().update_user_data(user_id) - - -async def set_thumb(_, message, pre_event, key): +async def add_rclone(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - path = "Thumbnails/" - if not await aiopath.isdir(path): - await mkdir(path) - photo_dir = await message.download() - des_dir = ospath.join(path, f"{user_id}.jpg") - await sync_to_async(Image.open(photo_dir).convert("RGB").save, des_dir, "JPEG") - await aioremove(photo_dir) - update_user_ldata(user_id, "thumb", des_dir) - await message.delete() - await update_user_settings(pre_event, key, "leech", msg=message) - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "thumb", des_dir) + rpath = f"{getcwd()}/rclone/" + await makedirs(rpath, exist_ok=True) + des_dir = f"{rpath}{user_id}.conf" + await message.download(file_name=des_dir) + update_user_ldata(user_id, "rclone_config", f"rclone/{user_id}.conf") + await delete_message(message) + await update_user_settings(pre_event) + await Database().update_user_doc(user_id, "rclone_config", des_dir) -async def add_rclone(_, message, pre_event): +async def add_token_pickle(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = 
False
-    path = f"{getcwd()}/tanha/"
-    if not await aiopath.isdir(path):
-        await mkdir(path)
-    des_dir = ospath.join(path, f"{user_id}.conf")
+    tpath = f"{getcwd()}/tokens/"
+    await makedirs(tpath, exist_ok=True)
+    des_dir = f"{tpath}{user_id}.pickle"
     await message.download(file_name=des_dir)
-    update_user_ldata(user_id, "rclone", f"tanha/{user_id}.conf")
-    await message.delete()
-    await update_user_settings(pre_event, "rcc", "mirror")
-    if DATABASE_URL:
-        await DbManager().update_user_doc(user_id, "rclone", des_dir)
+    update_user_ldata(user_id, "token_pickle", f"tokens/{user_id}.pickle")
+    await delete_message(message)
+    await update_user_settings(pre_event)
+    await Database().update_user_doc(user_id, "token_pickle", des_dir)


-async def event_handler(client, query, pfunc, rfunc, photo=False, document=False):
+async def delete_path(_, message, pre_event):
+    user_id = message.from_user.id
+    handler_dict[user_id] = False
+    user_dict = user_data.get(user_id, {})
+    names = message.text.split()
+    for name in names:
+        if name in user_dict["upload_paths"]:
+            del user_dict["upload_paths"][name]
+    new_value = user_dict["upload_paths"]
+    update_user_ldata(user_id, "upload_paths", new_value)
+    await delete_message(message)
+    await update_user_settings(pre_event)
+    await Database().update_user_doc(user_id, "upload_paths", new_value)
+
+
+async def set_option(_, message, pre_event, option):
+    user_id = message.from_user.id
+    handler_dict[user_id] = False
+    value = message.text
+    if option == "excluded_extensions":
+        fx = value.split()
+        value = ["aria2", "!qB"]
+        for x in fx:
+            x = x.lstrip(".")
+            value.append(x.strip().lower())
+    elif option == "upload_paths":
+        user_dict = user_data.get(user_id, {})
+        user_dict.setdefault("upload_paths", {})
+        lines = value.split("\n")
+        for line in lines:
+            data = line.split(maxsplit=1)
+            if len(data) != 2:
+                await send_message(
+                    message,
+                    "Wrong format! Send the path name and the path/ID separated by a space.",
+                    MARKDOWN=True,
+                )
+                await update_user_settings(pre_event)
+                return
+            name, path = data
+            user_dict["upload_paths"][name] = path
+        value = user_dict["upload_paths"]
+    update_user_ldata(user_id, option, value)
+    await delete_message(message)
+    await update_user_settings(pre_event)
+    await Database().update_user_data(user_id)
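For reference, the upload_paths branch of set_option above is just a per-line parser; the sketch below mirrors it outside the handler (hypothetical helper name, error path collapsed into an exception, destination values illustrative only):

```python
def parse_upload_paths(text: str) -> dict[str, str]:
    # One "name path" pair per line, exactly as set_option expects;
    # the name becomes a shortcut for the upload destination path/ID.
    paths: dict[str, str] = {}
    for line in text.split("\n"):
        data = line.split(maxsplit=1)
        if len(data) != 2:
            raise ValueError(f"Wrong format: {line!r}")
        name, path = data
        paths[name] = path
    return paths

print(parse_upload_paths("films rclone:gdrive/Movies\nshows mrcc:main/Shows"))
# -> {'films': 'rclone:gdrive/Movies', 'shows': 'mrcc:main/Shows'}
```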
Add", MARKDOWN=True + ) + await update_user_settings(pre_event) + return + name, path = data + user_dict["upload_paths"][name] = path + value = user_dict["upload_paths"] + update_user_ldata(user_id, option, value) + await delete_message(message) + await update_user_settings(pre_event) + await Database().update_user_data(user_id) + + +async def event_handler(client, query, pfunc, photo=False, document=False): user_id = query.from_user.id handler_dict[user_id] = True start_time = time() @@ -387,11 +227,12 @@ async def event_filter(_, __, event): handler = client.add_handler( MessageHandler(pfunc, filters=create(event_filter)), group=-1 ) + while handler_dict[user_id]: await sleep(0.5) if time() - start_time > 60: handler_dict[user_id] = False - await rfunc() + await update_user_settings(query) client.remove_handler(*handler) @@ -401,314 +242,484 @@ async def edit_user_settings(client, query): user_id = from_user.id message = query.message data = query.data.split() + handler_dict[user_id] = False thumb_path = f"Thumbnails/{user_id}.jpg" - rclone_path = f"tanha/{user_id}.conf" + rclone_conf = f"rclone/{user_id}.conf" + token_pickle = f"tokens/{user_id}.pickle" user_dict = user_data.get(user_id, {}) if user_id != int(data[1]): await query.answer("Not Yours!", show_alert=True) - return None - if data[2] in ["universal", "mirror", "leech"]: + elif data[2] in [ + "as_doc", + "stop_duplicate", + ]: + update_user_ldata(user_id, data[2], data[3] == "true") await query.answer() - await update_user_settings(query, data[2]) - return None - if data[2] == "doc": - update_user_ldata(user_id, "as_doc", not user_dict.get("as_doc", False)) + await update_user_settings(query) + await Database().update_user_data(user_id) + elif data[2] in ["thumb", "rclone_config", "token_pickle"]: + if data[2] == "thumb": + fpath = thumb_path + elif data[2] == "rclone_config": + fpath = rclone_conf + else: + fpath = token_pickle + if await aiopath.exists(fpath): + await query.answer() + await remove(fpath) + update_user_ldata(user_id, data[2], "") + await update_user_settings(query) + await Database().update_user_doc(user_id, data[2]) + else: + await query.answer("Old Settings", show_alert=True) + await update_user_settings(query) + elif data[2] in [ + "yt_opt", + "lcaption", + "index_url", + "excluded_extensions", + "name_sub", + "metadata", + "user_dump", + "session_string", + ]: + await query.answer() + update_user_ldata(user_id, data[2], "") + await update_user_settings(query) + await Database().update_user_data(user_id) + elif data[2] in ["rclone_path", "gdrive_id"]: + await query.answer() + if data[2] in user_data.get(user_id, {}): + del user_data[user_id][data[2]] + await update_user_settings(query) + await Database().update_user_data(user_id) + elif data[2] == "leech": await query.answer() - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "show_tds": - handler_dict[user_id] = False - user_tds = user_dict.get("user_tds", {}) - msg = "User TD Details\n\n" - for index_no, (drive_name, drive_dict) in enumerate( - user_tds.items(), start=1 + thumbpath = f"Thumbnails/{user_id}.jpg" + buttons = ButtonMaker() + buttons.callback("Thumbnail", f"userset {user_id} sthumb") + thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists" + split_size = MAX_SPLIT_SIZE + buttons.callback("Leech caption", f"userset {user_id} leech_caption") + if user_dict.get("lcaption", False): + lcaption = user_dict["lcaption"] + else: + lcaption = 
"None" + buttons.callback("Leech Prefix", f"userset {user_id} leech_prefix") + lprefix = user_dict["lprefix"] if user_dict.get("lprefix", False) else "None" + buttons.callback("User dump", f"userset {user_id} u_dump") + if user_dict.get("user_dump", False): + user_dump = user_dict["user_dump"] + else: + user_dump = "None" + buttons.callback("Session string", f"userset {user_id} s_string") + if user_dict.get("session_string", False): + session_string = "Exists" + else: + session_string = "Not exists" + if ( + user_dict.get("as_doc", False) + or "as_doc" not in user_dict + and config_dict["AS_DOCUMENT"] + ): + ltype = "DOCUMENT" + buttons.callback("Send As Media", f"userset {user_id} as_doc false") + else: + ltype = "MEDIA" + buttons.callback("Send As Document", f"userset {user_id} as_doc true") + buttons.callback("Back", f"userset {user_id} back") + buttons.callback("Close", f"userset {user_id} close") + text = f""">Leech Settings + +**Leech Type:** {ltype} +**Custom Thumbnail:** {thumbmsg} +**Leech Split Size:** {split_size} +**Session string:** {session_string} +**User Custom Dump:** `{user_dump}` +**Leech Prefix:** `{lprefix}` +**Leech Caption:** `{lcaption}` +""" + await edit_message(message, text, buttons.menu(2), MARKDOWN=True) + elif data[2] == "rclone": + await query.answer() + buttons = ButtonMaker() + buttons.callback("Rclone Config", f"userset {user_id} rcc") + buttons.callback("Default Rclone Path", f"userset {user_id} rcp") + buttons.callback("Back", f"userset {user_id} back") + buttons.callback("Close", f"userset {user_id} close") + rccmsg = "Exists" if await aiopath.exists(rclone_conf) else "Not Exists" + if user_dict.get("rclone_path", False): + rccpath = user_dict["rclone_path"] + elif RP := config_dict["RCLONE_PATH"]: + rccpath = RP + else: + rccpath = "None" + text = f""">Rclone Settings + +**Rclone Config:** {rccmsg} +**Rclone Path:** `{rccpath}`""" + await edit_message(message, text, buttons.menu(1), MARKDOWN=True) + elif data[2] == "gdrive": + await query.answer() + buttons = ButtonMaker() + buttons.callback("token.pickle", f"userset {user_id} token") + buttons.callback("Default Gdrive ID", f"userset {user_id} gdid") + buttons.callback("Index URL", f"userset {user_id} index") + if ( + user_dict.get("stop_duplicate", False) + or "stop_duplicate" not in user_dict + and config_dict["STOP_DUPLICATE"] ): - msg += f"{index_no}: Name: {drive_name}
\n" - msg += f" Drive ID:{drive_dict['drive_id']}
\n" - msg += f" Index Link:{ind_url if (ind_url := drive_dict['index_link']) else 'Not Provided'}
\n\n" - try: - await sendCustomMsg(user_id, msg) - await query.answer( - "User TDs Successfully Send in your PM", show_alert=True + buttons.callback( + "Disable Stop Duplicate", f"userset {user_id} stop_duplicate false" ) - except Exception: - await query.answer( - "Start the Bot in PM (Private) and Try Again", show_alert=True + sd_msg = "Enabled" + else: + buttons.callback( + "Enable Stop Duplicate", f"userset {user_id} stop_duplicate true" ) - await update_user_settings(query, "user_tds", "mirror") - return None - if data[2] == "dthumb": - handler_dict[user_id] = False - if await aiopath.exists(thumb_path): - await query.answer() - await aioremove(thumb_path) - update_user_ldata(user_id, "thumb", "") - await update_user_settings(query, "thumb", "leech") - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "thumb") - return None - await query.answer("Old Settings", show_alert=True) - await update_user_settings(query, "leech") - return None - if data[2] == "thumb": + sd_msg = "Disabled" + buttons.callback("Back", f"userset {user_id} back") + buttons.callback("Close", f"userset {user_id} close") + tokenmsg = "Exists" if await aiopath.exists(token_pickle) else "Not Exists" + if user_dict.get("gdrive_id", False): + gdrive_id = user_dict["gdrive_id"] + elif GDID := config_dict["GDRIVE_ID"]: + gdrive_id = GDID + else: + gdrive_id = "None" + index = ( + user_dict["index_url"] if user_dict.get("index_url", False) else "None" + ) + text = f""">Gdrive Tools Settings + +**Gdrive Token:** {tokenmsg} +**Gdrive ID:** `{gdrive_id}` +**Index URL:** `{index}` +**Stop Duplicate:** {sd_msg}""" + await edit_message(message, text, buttons.menu(1), MARKDOWN=True) + elif data[2] == "sthumb": await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "leech", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_thumb, pre_event=query, key=data[2]) - rfunc = partial(update_user_settings, query, data[2], "leech") - await event_handler(client, query, pfunc, rfunc, True) - return None - if data[2] == "yt_opt": + buttons = ButtonMaker() + if await aiopath.exists(thumb_path): + buttons.callback("Delete Thumbnail", f"userset {user_id} thumb") + buttons.callback("Back", f"userset {user_id} leech") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send a photo to save it as custom thumbnail. Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, + ) + pfunc = partial(set_thumb, pre_event=query) + await event_handler(client, query, pfunc, True) + elif data[2] == "yto": await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "universal", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_yt_options, pre_event=query) - rfunc = partial(update_user_settings, query, data[2], "universal") - await event_handler(client, query, pfunc, rfunc) - return None - if data[2] == "dyt_opt": - handler_dict[user_id] = False + buttons = ButtonMaker() + if user_dict.get("yt_opt", False) or config_dict["YT_DLP_OPTIONS"]: + buttons.callback( + "Remove YT-DLP Options", f"userset {user_id} yt_opt", "header" + ) + buttons.callback("Back", f"userset {user_id} back") + buttons.callback("Close", f"userset {user_id} close") + rmsg = """ +Send YT-DLP Options. Timeout: 60 sec +Format: key:value|key:value|key:value. +Example: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True +Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options. 
+ """ + await edit_message(message, rmsg, buttons.menu(1), MARKDOWN=True) + pfunc = partial(set_option, pre_event=query, option="yt_opt") + await event_handler(client, query, pfunc) + elif data[2] == "rcc": await query.answer() - update_user_ldata(user_id, "yt_opt", "") - await update_user_settings(query, "yt_opt", "universal") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "td_mode": - handler_dict[user_id] = False - if data[2] == "td_mode" and not user_dict.get("user_tds", False): - return await query.answer( - "Set UserTD first to Enable User TD Mode !", show_alert=True + buttons = ButtonMaker() + if await aiopath.exists(rclone_conf): + buttons.callback( + "Delete rclone.conf", f"userset {user_id} rclone_config" ) + buttons.callback("Back", f"userset {user_id} rclone") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send rclone.conf. Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, + ) + pfunc = partial(add_rclone, pre_event=query) + await event_handler(client, query, pfunc, document=True) + elif data[2] == "rcp": await query.answer() - update_user_ldata(user_id, data[2], not user_dict.get(data[2], False)) - await update_user_settings(query, "user_tds", "mirror") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "mediainfo": - handler_dict[user_id] = False - if config_dict["SHOW_MEDIAINFO"]: - return await query.answer( - "Force Enabled! Can't Alter Settings", show_alert=True + buttons = ButtonMaker() + if user_dict.get("rclone_path", False): + buttons.callback("Reset Rclone Path", f"userset {user_id} rclone_path") + buttons.callback("Back", f"userset {user_id} rclone") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send Rclone Path. Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, + ) + pfunc = partial(set_option, pre_event=query, option="rclone_path") + await event_handler(client, query, pfunc) + elif data[2] == "token": + await query.answer() + buttons = ButtonMaker() + if await aiopath.exists(token_pickle): + buttons.callback( + "Delete token.pickle", f"userset {user_id} token_pickle" ) + buttons.callback("Back", f"userset {user_id} gdrive") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send token.pickle. Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, + ) + pfunc = partial(add_token_pickle, pre_event=query) + await event_handler(client, query, pfunc, document=True) + elif data[2] == "gdid": await query.answer() - update_user_ldata(user_id, data[2], not user_dict.get(data[2], False)) - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "mgroup": - handler_dict[user_id] = False + buttons = ButtonMaker() + if user_dict.get("gdrive_id", False): + buttons.callback("Reset Gdrive ID", f"userset {user_id} gdrive_id") + buttons.callback("Back", f"userset {user_id} gdrive") + buttons.callback("Close", f"userset {user_id} close") + rmsg = "Send Gdrive ID. 
Timeout: 60 sec" + await edit_message(message, rmsg, buttons.menu(1), MARKDOWN=True) + pfunc = partial(set_option, pre_event=query, option="gdrive_id") + await event_handler(client, query, pfunc) + elif data[2] == "index": await query.answer() - update_user_ldata( - user_id, "media_group", not user_dict.get("media_group", False) + buttons = ButtonMaker() + if user_dict.get("index_url", False): + buttons.callback("Remove Index URL", f"userset {user_id} index_url") + buttons.callback("Back", f"userset {user_id} gdrive") + buttons.callback("Close", f"userset {user_id} close") + rmsg = "Send Index URL. Timeout: 60 sec" + await edit_message(message, rmsg, buttons.menu(1), MARKDOWN=True) + pfunc = partial(set_option, pre_event=query, option="index_url") + await event_handler(client, query, pfunc) + elif data[2] == "leech_prefix": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("lprefix", False): + buttons.callback("Remove Leech Prefix", f"userset {user_id} lprefix") + buttons.callback("Back", f"userset {user_id} leech") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send Leech Filename Prefix. You can add HTML tags. Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, ) - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "rcc": + pfunc = partial(set_option, pre_event=query, option="lprefix") + await event_handler(client, query, pfunc) + elif data[2] == "leech_caption": await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "mirror", edit_mode) - if not edit_mode: - return None - pfunc = partial(add_rclone, pre_event=query) - rfunc = partial(update_user_settings, query, data[2], "mirror") - await event_handler(client, query, pfunc, rfunc, document=True) - return None - if data[2] == "drcc": - handler_dict[user_id] = False - if await aiopath.exists(rclone_path): - await query.answer() - await aioremove(rclone_path) - update_user_ldata(user_id, "rclone", "") - await update_user_settings(query, "rcc", "mirror") - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "rclone") - return None - await query.answer("Old Settings", show_alert=True) - await update_user_settings(query) - return None - if data[2] == "user_tds": - handler_dict[user_id] = False + buttons = ButtonMaker() + if user_dict.get("lcaption", False): + buttons.callback("Remove Leech Caption", f"userset {user_id} lcaption") + buttons.callback("Back", f"userset {user_id} leech") + buttons.callback("Close", f"userset {user_id} close") + await edit_message( + message, + "Send Leech Filename caption. You can add HTML tags. 
Timeout: 60 sec",
+            buttons.menu(1),
+            MARKDOWN=True,
+        )
+        pfunc = partial(set_option, pre_event=query, option="lcaption")
+        await event_handler(client, query, pfunc)
+    elif data[2] == "u_dump":
+        await query.answer()
+        buttons = ButtonMaker()
+        if user_dict.get("user_dump", False):
+            buttons.callback("Remove User Dump", f"userset {user_id} user_dump")
+        buttons.callback("Back", f"userset {user_id} leech")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message,
+            "Send your custom dump channel ID; it starts with -100, and the bot must be an admin in that channel. Timeout: 60 sec",
+            buttons.menu(1),
+            MARKDOWN=True,
+        )
+        pfunc = partial(set_option, pre_event=query, option="user_dump")
+        await event_handler(client, query, pfunc)
+    elif data[2] == "s_string":
+        await query.answer()
+        buttons = ButtonMaker()
+        if user_dict.get("session_string", False):
+            buttons.callback("Remove session", f"userset {user_id} session_string")
+        buttons.callback("Back", f"userset {user_id} leech")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message,
+            "Send your Pyrogram v2 session string to download content from private or restricted channels. Timeout: 60 sec",
+            buttons.menu(1),
+            MARKDOWN=True,
+        )
+        pfunc = partial(set_option, pre_event=query, option="session_string")
+        await event_handler(client, query, pfunc)
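Before the excluded-extensions menu below, it may help to see the normalization that set_option applies to this input in one place; a minimal sketch with a hypothetical helper name:

```python
def parse_excluded_extensions(text: str) -> list[str]:
    # Mirrors set_option's "excluded_extensions" branch above: the defaults
    # "aria2" and "!qB" are always kept, user input is space-separated,
    # lowercased, and stripped of a leading dot.
    value = ["aria2", "!qB"]
    for x in text.split():
        value.append(x.lstrip(".").strip().lower())
    return value

print(parse_excluded_extensions(".MKV jpg .PNG"))
# -> ['aria2', '!qB', 'mkv', 'jpg', 'png']
```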
+    elif data[2] == "ex_ex":
+        await query.answer()
+        buttons = ButtonMaker()
+        if (
+            user_dict.get("excluded_extensions", False)
+            or "excluded_extensions" not in user_dict
+            and GLOBAL_EXTENSION_FILTER
+        ):
+            buttons.callback(
+                "Remove Excluded Extensions",
+                f"userset {user_id} excluded_extensions",
+            )
+        buttons.callback("Back", f"userset {user_id} back")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message,
+            "Send excluded extensions separated by spaces, without a dot at the beginning. Timeout: 60 sec",
+            buttons.menu(1),
+            MARKDOWN=True,
+        )
+        pfunc = partial(set_option, pre_event=query, option="excluded_extensions")
+        await event_handler(client, query, pfunc)
+    elif data[2] == "name_substitute":
+        await query.answer()
+        buttons = ButtonMaker()
+        if user_dict.get("name_sub", False):
+            buttons.callback("Remove Name Substitute", f"userset {user_id} name_sub")
+        buttons.callback("Back", f"userset {user_id} back")
+        buttons.callback("Close", f"userset {user_id} close")
+        emsg = r"""Word Substitutions. You can use a regex pattern instead of plain text. Timeout: 60 sec
NOTE: You must escape these characters with \: ^$.|?*+()[]{}-
Example-1: text : code : s|mirror : leech|tea : : s|clone
1. text will get replaced by code (case-sensitive)
2. mirror will get replaced by leech
3. tea will get removed (case-sensitive)
4. clone will get removed
Example-2: \(text\) | \[test\] : test | \\text\\ : text : s
1. (text) will get removed
2. [test] will get replaced by test
3. \text\ will get replaced by text (case-sensitive)
"""
+        emsg += (
+            f"Your Current Value is {user_dict.get('name_sub') or 'not added yet!'}"
+        )
+        await edit_message(message, emsg, buttons.menu(1), MARKDOWN=True)
+        pfunc = partial(set_option, pre_event=query, option="name_sub")
+        await event_handler(client, query, pfunc)
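The substitution grammar documented in emsg above maps naturally onto re.sub; the following is a hypothetical re-implementation (helper name and exact rule precedence are assumptions, not part of the patch):

```python
import re

def apply_name_sub(name: str, name_sub: str) -> str:
    # Each entry is "pattern : replacement : s", entries separated by "|".
    # The replacement and the trailing "s" (case-sensitive) are optional;
    # an empty replacement deletes the match, as in the examples above.
    for entry in name_sub.split("|"):
        parts = [p.strip() for p in entry.split(":")]
        pattern = parts[0]
        repl = parts[1] if len(parts) > 1 else ""
        flags = 0 if len(parts) > 2 and parts[2] == "s" else re.IGNORECASE
        name = re.sub(pattern, repl, name, flags=flags)
    return name

print(apply_name_sub("My.Mirror.Rip.mkv", "mirror : leech|rip : : s"))
# -> "My.leech.Rip.mkv" ("Rip" survives the case-sensitive removal rule)
```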
+    elif data[2] == "metadata_key":
+        await query.answer()
+        buttons = ButtonMaker()
+        if user_dict.get("metadata", False):
+            buttons.callback("Remove Metadata key", f"userset {user_id} metadata")
+        buttons.callback("Back", f"userset {user_id} back")
+        buttons.callback("Close", f"userset {user_id} close")
+        emsg = "Metadata will change MKV video files, including all audio, video, and subtitle stream titles."
+        emsg += (
+            f"\nYour Current Value is {user_dict.get('metadata') or 'not added yet!'}"
+        )
+        await edit_message(message, emsg, buttons.menu(1), MARKDOWN=True)
+        pfunc = partial(set_option, pre_event=query, option="metadata")
+        await event_handler(client, query, pfunc)
+    elif data[2] in ["gd", "rc"]:
+        await query.answer()
+        du = "rc" if data[2] == "gd" else "gd"
+        update_user_ldata(user_id, "default_upload", du)
+        await update_user_settings(query)
+        await Database().update_user_data(user_id)
+    elif data[2] == "upload_paths":
+        await query.answer()
+        buttons = ButtonMaker()
+        buttons.callback("New Path", f"userset {user_id} new_path")
+        if user_dict.get(data[2], False):
+            buttons.callback("Show All Paths", f"userset {user_id} show_path")
+            buttons.callback("Remove Path", f"userset {user_id} rm_path")
+        buttons.callback("Back", f"userset {user_id} back")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message, "Add or remove upload path.\n", buttons.menu(1), MARKDOWN=True
+        )
+    elif data[2] == "new_path":
+        await query.answer()
+        buttons = ButtonMaker()
+        buttons.callback("Back", f"userset {user_id} upload_paths")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message,
+            "Send a path name (no spaces) to use as a shortcut, followed by the path/ID, separated by a space. You can add multiple name and path pairs, one per line. Timeout: 60 sec",
+            buttons.menu(1),
+            MARKDOWN=True,
+        )
+        pfunc = partial(set_option, pre_event=query, option="upload_paths")
+        await event_handler(client, query, pfunc)
+    elif data[2] == "rm_path":
+        await query.answer()
+        buttons = ButtonMaker()
+        buttons.callback("Back", f"userset {user_id} upload_paths")
+        buttons.callback("Close", f"userset {user_id} close")
+        await edit_message(
+            message,
+            "Send the path names you want to delete, separated by spaces. 
Timeout: 60 sec", + buttons.menu(1), + MARKDOWN=True, + ) + pfunc = partial(delete_path, pre_event=query) + await event_handler(client, query, pfunc) + elif data[2] == "show_path": await query.answer() - thumb_path = f"Thumbnails/{user_id}.jpg" - rclone_path = f"tanha/{user_id}.conf" - if await aiopath.exists(thumb_path): - await aioremove(thumb_path) - if await aiopath.exists(rclone_path): - await aioremove(rclone_path) - update_user_ldata(user_id, None, None) - if DATABASE_URL: - await DbManager().update_user_data(user_id) - await DbManager().update_user_doc(user_id, "thumb") - await DbManager().update_user_doc(user_id, "rclone") - await edit_message(message, f"Data Reset for {user_id}") - return None - handler_dict[user_id] = False - await query.answer() - await message.reply_to_message.delete() - await message.delete() - return None - - -async def get_user_info(client, id): - try: - return (await client.get_users(id)).mention(style="html") - except Exception: - return "" - - -async def send_users_settings(client, message): - text = message.text.split(maxsplit=1) - userid = text[1] if len(text) > 1 else None - if userid and not userid.isdigit(): - userid = None - elif ( - (reply_to := message.reply_to_message) - and reply_to.from_user - and not reply_to.from_user.is_bot - ): - userid = reply_to.from_user.id - if not userid: - msg = f"Total Users / Chats Data Saved : {len(user_data)}" buttons = ButtonMaker() - buttons.callback("Close", f"userset {message.from_user.id} close") - button = buttons.column(1) - for user, data in user_data.items(): - msg += f"\n\n{user}
:" - if data: - for key, value in data.items(): - if key in ["token", "time"]: - continue - msg += f"\n{key}:{escape(str(value))}
" + buttons.callback("Back", f"userset {user_id} upload_paths") + buttons.callback("Close", f"userset {user_id} close") + user_dict = user_data.get(user_id, {}) + msg = "".join( + f"**{key}**: `{value}`\n" + for key, value in user_dict["upload_paths"].items() + ) + await edit_message(message, msg, buttons.menu(1), MARKDOWN=True) + elif data[2] == "reset": + await query.answer() + if ud := user_data.get(user_id, {}): + if ud and ("is_sudo" in ud or "is_auth" in ud): + for k in list(ud.keys()): + if k not in ["is_sudo", "is_auth"]: + del user_data[user_id][k] else: - msg += "\nUser's Data is Empty!" - if len(msg.encode()) > 4000: - with BytesIO(str.encode(msg)) as ofile: + user_data[user_id].clear() + await update_user_settings(query) + await Database().update_user_data(user_id) + for fpath in [thumb_path, rclone_conf, token_pickle]: + if await aiopath.exists(fpath): + await remove(fpath) + elif data[2] == "back": + await query.answer() + await update_user_settings(query) + else: + await query.answer() + await delete_message(message.reply_to_message) + await delete_message(message) + + +async def send_users_settings(_, message): + if user_data: + msg = "" + for u, d in user_data.items(): + kmsg = f"\n{u}:\n" + if vmsg := "".join( + f"{k}:{v}
\n" for k, v in d.items() if f"{v}" + ): + msg += kmsg + vmsg + + msg_ecd = msg.encode() + if len(msg_ecd) > 4000: + with BytesIO(msg_ecd) as ofile: ofile.name = "users_settings.txt" await sendFile(message, ofile) else: - await send_message(message, msg, button) - elif int(userid) in user_data: - msg = f"{await get_user_info(client, userid)} ({userid}
):" - if data := user_data[int(userid)]: - buttons = ButtonMaker() - buttons.callback( - "Delete", f"userset {message.from_user.id} user_del {userid}" - ) - buttons.callback("Close", f"userset {message.from_user.id} close") - button = buttons.column(1) - for key, value in data.items(): - if key in ["token", "time"]: - continue - msg += f"\n{key}:{escape(str(value))}
" - else: - msg += "\nThis User has not Saved anything." - button = None - await send_message(message, msg, button) + await send_message(message, msg) else: - await send_message(message, f"{userid} have not saved anything..") + await send_message(message, "No users data!") bot.add_handler( diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py index 8de723032..8b5a61aaa 100644 --- a/bot/modules/ytdlp.py +++ b/bot/modules/ytdlp.py @@ -1,34 +1,28 @@ -import contextlib from time import time -from asyncio import Event, sleep, wait_for, wrap_future +from asyncio import Event, wait_for, wrap_future from functools import partial +from httpx import AsyncClient from yt_dlp import YoutubeDL -from aiohttp import ClientSession -from aiofiles.os import path as aiopath from pyrogram.filters import user, regex, command from pyrogram.handlers import MessageHandler, CallbackQueryHandler -from bot import LOGGER, bot, user_data, config_dict +from bot import LOGGER, DOWNLOAD_DIR, bot, config_dict from bot.helper.ext_utils.bot_utils import ( - is_url, + COMMAND_USAGE, new_task, arg_parser, new_thread, sync_to_async, - fetch_user_tds, - is_gdrive_link, - is_rclone_path, +) +from bot.helper.ext_utils.links_utils import is_url +from bot.helper.ext_utils.status_utils import ( get_readable_time, get_readable_file_size, ) -from bot.helper.ext_utils.bulk_links import extract_bulk_links -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import YT_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils +from bot.helper.aeon_utils.access_check import error_check +from bot.helper.listeners.task_listener import TaskListener from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( @@ -36,11 +30,8 @@ edit_message, send_message, delete_message, - one_minute_del, five_minute_del, ) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_leech_utils.download_utils.yt_dlp_download import ( YoutubeDLHelper, ) @@ -69,7 +60,7 @@ async def select_format(_, query, obj): elif data[1] == "cancel": await edit_message(message, "Task has been cancelled.") obj.qual = None - obj.is_cancelled = True + obj.listener.isCancelled = True obj.event.set() else: if data[1] == "sub": @@ -82,47 +73,42 @@ async def select_format(_, query, obj): class YtSelection: - def __init__(self, client, message): - self.__message = message - self.__user_id = message.from_user.id - self.__client = client - self.__is_m4a = False - self.__reply_to = None - self.__time = time() - self.__timeout = 120 - self.__is_playlist = False - self.is_cancelled = False - self.__main_buttons = None + def __init__(self, listener): + self.listener = listener + self._is_m4a = False + self._reply_to = None + self._time = time() + self._timeout = 120 + self._is_playlist = False + self._main_buttons = None self.event = Event() self.formats = {} self.qual = None @new_thread - async def __event_handler(self): + async def _event_handler(self): pfunc = partial(select_format, obj=self) - handler = self.__client.add_handler( + handler = self.listener.client.add_handler( CallbackQueryHandler( - 
pfunc, filters=regex("^ytq") & user(self.__user_id) + pfunc, filters=regex("^ytq") & user(self.listener.userId) ), group=-1, ) try: - await wait_for(self.event.wait(), timeout=self.__timeout) + await wait_for(self.event.wait(), timeout=self._timeout) except Exception: - await edit_message( - self.__reply_to, "Timed Out. Task has been cancelled!" - ) + await edit_message(self._reply_to, "Timed Out. Task has been cancelled!") self.qual = None - self.is_cancelled = True + self.listener.isCancelled = True self.event.set() finally: - self.__client.remove_handler(*handler) + self.listener.client.remove_handler(*handler) async def get_quality(self, result): - future = self.__event_handler() + future = self._event_handler() buttons = ButtonMaker() if "entries" in result: - self.__is_playlist = True + self._is_playlist = True for i in ["144", "240", "360", "480", "720", "1080", "1440", "2160"]: video_format = ( f"bv*[height<=?{i}][ext=mp4]+ba[ext=m4a]/b[height<=?{i}]" @@ -139,8 +125,8 @@ async def get_quality(self, result): buttons.callback("Best Videos", "ytq bv*+ba/b") buttons.callback("Best Audios", "ytq ba/b") buttons.callback("Cancel", "ytq cancel", "footer") - self.__main_buttons = buttons.column(3) - msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" + self._main_buttons = buttons.menu(3) + msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" else: format_dict = result.get("formats") if format_dict is not None: @@ -155,13 +141,15 @@ async def get_quality(self, result): else: size = 0 - if ( - item.get("video_ext") == "none" - and item.get("acodec") != "none" + if item.get("video_ext") == "none" and ( + item.get("resolution") == "audio only" + or item.get("acodec") != "none" ): if item.get("audio_ext") == "m4a": - self.__is_m4a = True - b_name = f"{item['acodec']}-{item['ext']}" + self._is_m4a = True + b_name = ( + f"{item.get('acodec') or format_id}-{item['ext']}" + ) v_format = format_id elif item.get("height"): height = item["height"] @@ -169,7 +157,7 @@ async def get_quality(self, result): fps = item["fps"] if item.get("fps") else "" b_name = f"{height}p{fps}-{ext}" ba_ext = ( - "[ext=m4a]" if self.__is_m4a and ext == "mp4" else "" + "[ext=m4a]" if self._is_m4a and ext == "mp4" else "" ) v_format = f"{format_id}+ba{ba_ext}/b[height=?{height}]" else: @@ -194,22 +182,22 @@ async def get_quality(self, result): buttons.callback("Best Video", "ytq bv*+ba/b") buttons.callback("Best Audio", "ytq ba/b") buttons.callback("Cancel", "ytq cancel", "footer") - self.__main_buttons = buttons.column(2) - msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - self.__reply_to = await send_message( - self.__message, msg, self.__main_buttons + self._main_buttons = buttons.menu(2) + msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + self._reply_to = await send_message( + self.listener.message, msg, self._main_buttons ) await wrap_future(future) - if not self.is_cancelled: - await delete_message(self.__reply_to) + if not self.listener.isCancelled: + await delete_message(self._reply_to) return self.qual async def back_to_main(self): - if self.__is_playlist: - msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" + if self._is_playlist: + msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self._timeout - (time() - 
self._time))}"
         else:
-            msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}"
-        await edit_message(self.__reply_to, msg, self.__main_buttons)
+            msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        await edit_message(self._reply_to, msg, self._main_buttons)

     async def qual_subbuttons(self, b_name):
         buttons = ButtonMaker()
@@ -219,12 +207,12 @@
             buttons.callback(button_name, f"ytq sub {b_name} {tbr}")
         buttons.callback("Back", "ytq back", "footer")
         buttons.callback("Cancel", "ytq cancel", "footer")
-        subbuttons = buttons.column(2)
-        msg = f"Choose Bit rate for {b_name}:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}"
-        await edit_message(self.__reply_to, msg, subbuttons)
+        subbuttons = buttons.menu(2)
+        msg = f"Choose Bitrate for {b_name}:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        await edit_message(self._reply_to, msg, subbuttons)

     async def mp3_subbuttons(self):
-        i = "s" if self.__is_playlist else ""
+        i = "s" if self._is_playlist else ""
         buttons = ButtonMaker()
         audio_qualities = [64, 128, 320]
         for q in audio_qualities:
@@ -232,33 +220,33 @@
             buttons.callback(f"{q}K-mp3", f"ytq {audio_format}")
         buttons.callback("Back", "ytq back")
         buttons.callback("Cancel", "ytq cancel")
-        subbuttons = buttons.column(3)
-        msg = f"Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}"
-        await edit_message(self.__reply_to, msg, subbuttons)
+        subbuttons = buttons.menu(3)
+        msg = f"Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        await edit_message(self._reply_to, msg, subbuttons)

     async def audio_format(self):
-        i = "s" if self.__is_playlist else ""
+        i = "s" if self._is_playlist else ""
         buttons = ButtonMaker()
         for frmt in ["aac", "alac", "flac", "m4a", "opus", "vorbis", "wav"]:
             audio_format = f"ba/b-{frmt}-"
             buttons.callback(frmt, f"ytq aq {audio_format}")
         buttons.callback("Back", "ytq back", "footer")
         buttons.callback("Cancel", "ytq cancel", "footer")
-        subbuttons = buttons.column(3)
-        msg = f"Choose Audio{i} Format:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}"
-        await edit_message(self.__reply_to, msg, subbuttons)
+        subbuttons = buttons.menu(3)
+        msg = f"Choose Audio{i} Format:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        await edit_message(self._reply_to, msg, subbuttons)

     async def audio_quality(self, format):
-        i = "s" if self.__is_playlist else ""
+        i = "s" if self._is_playlist else ""
         buttons = ButtonMaker()
         for qual in range(11):
             audio_format = f"{format}{qual}"
             buttons.callback(qual, f"ytq {audio_format}")
         buttons.callback("Back", "ytq aq back")
         buttons.callback("Cancel", "ytq aq cancel")
-        subbuttons = buttons.column(5)
-        msg = f"Choose Audio{i} Qaulity:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}"
-        await edit_message(self.__reply_to, msg, subbuttons)
+        subbuttons = buttons.menu(5)
+        msg = f"Choose Audio{i} Quality:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}"
+        await edit_message(self._reply_to, msg, subbuttons)


 def extract_info(link, options):
@@ -271,295 +259,213 @@ async def _mdisk(link, name):
     key = link.split("/")[-1]
-    async with (
-        ClientSession() as session,
-        session.get(
async with AsyncClient(verify=False) as client: + resp = await client.get( f"https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}" - ) as resp, + ) + if resp.status_code == 200: + resp_json = resp.json() + link = resp_json["source"] + if not name: + name = resp_json["filename"] + return name, link + + +class YtDlp(TaskListener): + def __init__( + self, + client, + message, + _=None, + is_leech=False, + __=None, + ___=None, + same_dir=None, + bulk=None, + multi_tag=None, + options="", ): - if resp.status == 200: - resp_json = await resp.json() - link = resp_json["source"] - if not name: - name = resp_json["filename"] - return name, link - - -@new_task -async def _ytdl(client, message, is_leech=False, same_dir=None, bulk=[]): - await send_react(message) - text = message.text.split("\n") - input_list = text[0].split(" ") - qual = "" - arg_base = { - "link": "", - "-m": "", - "-n": "", - "-opt": "", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - "-t": "", - "-s": False, - "-b": False, - "-z": False, - "-i": "0", - "-ss": "0", - } - args = arg_parser(input_list[1:], arg_base) - i = args["-i"] - select = args["-s"] - isBulk = args["-b"] - opt = args["-opt"] - folder_name = args["-m"] - name = args["-n"] - up = args["-up"] - rcf = args["-rcf"] - link = args["link"] - compress = args["-z"] - thumb = args["-t"] - drive_id = args["-id"] - index_link = args["-index"] - ss = args["-ss"] - multi = int(i) if i.isdigit() else 0 - sshots = min(int(ss) if ss.isdigit() else 0, 10) - bulk_start = 0 - bulk_end = 0 - - if not isinstance(isBulk, bool): - dargs = isBulk.split(":") - bulk_start = dargs[0] or None - if len(dargs) == 2: - bulk_end = dargs[1] or None - isBulk = True - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if folder_name and not isBulk: - folder_name = f"/{folder_name}" if same_dir is None: - same_dir = {"total": multi, "tasks": set(), "name": folder_name} - same_dir["tasks"].add(message.id) + same_dir = {} + if bulk is None: + bulk = [] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = same_dir + self.bulk = bulk + super().__init__() + self.isYtDlp = True + self.is_leech = is_leech + + @new_task + async def new_event(self): + error_msg, error_button = await error_check(self.message) + await delete_links(self.message) + if error_msg: + error = await send_message(self.message, error_msg, error_button) + await five_minute_del(error) + return + text = self.message.text.split("\n") + input_list = text[0].split(" ") + qual = "" + + args = { + "-s": False, + "-b": False, + "-z": False, + "-sv": False, + "-ss": False, + "-i": 0, + "link": "", + "-m": "", + "-opt": "", + "-n": "", + "-up": "", + "-rcf": "", + "-t": "", + "-ca": "", + "-cv": "", + "-ns": "", + } + + arg_parser(input_list[1:], args) - if isBulk: try: - bulk = await extract_bulk_links(message, bulk_start, bulk_end) - if len(bulk) == 0: - raise ValueError("Bulk Empty!") + self.multi = int(args["-i"]) except Exception: + self.multi = 0 + + self.select = args["-s"] + self.name = args["-n"] + self.upDest = args["-up"] + self.rcFlags = args["-rcf"] + self.link = args["link"] + self.compress = args["-z"] + self.thumb = args["-t"] + self.sampleVideo = args["-sv"] + self.screenShots = args["-ss"] + self.convertAudio = args["-ca"] + self.convertVideo = args["-cv"] + self.nameSub = args["-ns"] + + is_bulk = args["-b"] + folder_name = args["-m"] + + bulk_start = 0 + bulk_end = 0 + reply_to = None + 
opt = args["-opt"] + + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or None + if len(dargs) == 2: + bulk_end = dargs[1] or None + is_bulk = True + + if not is_bulk: + if folder_name: + folder_name = f"/{folder_name}" + if not self.same_dir: + self.same_dir = { + "total": self.multi, + "tasks": set(), + "name": folder_name, + } + self.same_dir["tasks"].add(self.mid) + elif self.same_dir: + self.same_dir["total"] -= 1 + else: + await self.initBulk(input_list, bulk_start, bulk_end, YtDlp) + return + + if len(self.bulk) != 0: + del self.bulk[0] + + path = f"{DOWNLOAD_DIR}{self.mid}{folder_name}" + + await self.getTag(text) + + opt = opt or self.userDict.get("yt_opt") or config_dict["YT_DLP_OPTIONS"] + + if not self.link and (reply_to := self.message.reply_to_message): + self.link = reply_to.text.split("\n", 1)[0].strip() + + if not is_url(self.link): await send_message( - message, - "Reply to text file or tg message that have links seperated by new line!", + self.message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1] ) - return None - b_msg = input_list[:1] - b_msg.append(f"{bulk[0]} -i {len(bulk)}") - nextmsg = await send_message(message, " ".join(b_msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - nextmsg.from_user = message.from_user - _ytdl(client, nextmsg, is_leech, same_dir, bulk) - return None + self.rm_from_sm_dir() + return - if len(bulk) != 0: - del bulk[0] + if "mdisk.me" in self.link: + self.name, self.link = await _mdisk(self.link, self.name) - @new_task - async def __run_multi(): - if multi <= 1: + try: + await self.beforeStart() + except Exception as e: + await send_message(self.message, e) + self.rm_from_sm_dir() return - await sleep(5) - if len(bulk) != 0: - msg = input_list[:1] - msg.append(f"{bulk[0]} -i {multi - 1}") - nextmsg = await send_message(message, " ".join(msg)) - else: - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 - ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - if folder_name: - same_dir["tasks"].add(nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - _ytdl(client, nextmsg, is_leech, same_dir, bulk) - - path = f"/usr/src/app/downloads/{message.id}{folder_name}" - - if len(text) > 1 and text[1].startswith("Tag: "): - tag, id_ = text[1].split("Tag: ")[1].split() - message.from_user = await client.get_users(id_) - with contextlib.suppress(Exception): - await message.unpin() - - user_id = message.from_user.id - user_dict = user_data.get(user_id, {}) - opt = opt or user_dict.get("yt_opt") or config_dict["YT_DLP_OPTIONS"] - - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - - if not link and (reply_to := message.reply_to_message): - link = reply_to.text.split("\n", 1)[0].strip() - - if not is_url(link): - reply_message = await send_message(message, YT_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None - - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, 
__msg in enumerate(error_msg, 1): - final_msg += f"\n{__i}: {__msg}" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None - - if not is_leech: - if config_dict["DEFAULT_UPLOAD"] == "rc" and not up or up == "rc": - up = config_dict["RCLONE_PATH"] - if not up and config_dict["DEFAULT_UPLOAD"] == "gd": - up = "gd" - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id - ): - return await send_message( - message, "Google Drive ID validation failed!!" - ) - if up == "gd" and not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - await delete_links(message) - return None - if not up: - await send_message(message, "No Rclone Destination!") - await delete_links(message) - return None - if up not in ["rcl", "gd"]: - if up.startswith("mrcc:"): - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message( - message, f"Rclone Config: {config_path} not Exists!" - ) - await delete_links(message) - return None - if up != "gd" and not is_rclone_path(up): - await send_message(message, "Wrong Rclone Upload Destination!") - await delete_links(message) - return None - - if up == "rcl" and not is_leech: - up = await RcloneList(client, message).get_rclone_path("rcu") - if not is_rclone_path(up): - await send_message(message, up) - await delete_links(message) - return None - - listener = MirrorLeechListener( - message, - compress, - is_leech=is_leech, - tag=tag, - same_dir=same_dir, - rc_flags=rcf, - upPath=up, - drive_id=drive_id, - index_link=index_link, - is_ytdlp=True, - files_utils={"screenshots": sshots, "thumb": thumb}, - ) - if "mdisk.me" in link: - name, link = await _mdisk(link, name) - - options = {"usenetrc": True, "cookiefile": "cookies.txt"} - if opt: - yt_opt = opt.split("|") - for ytopt in yt_opt: - key, value = map(str.strip, ytopt.split(":", 1)) - if key == "format" and value.startswith("ba/b-"): - if select: - qual = "" - elif value.startswith("ba/b-"): - qual = value + options = {"usenetrc": True, "cookiefile": "cookies.txt"} + if opt: + yt_opts = opt.split("|") + for ytopt in yt_opts: + key, value = map(str.strip, ytopt.split(":", 1)) + if key == "postprocessors": continue - if value.startswith("^"): - if "." in value or value == "^inf": - value = float(value.split("^")[1]) - else: - value = int(value.split("^")[1]) - elif value.lower() == "true": - value = True - elif value.lower() == "false": - value = False - elif value.startswith(("{", "[", "(")) and value.endswith( - ("}", "]", ")") - ): - value = eval(value) - options[key] = value - + if key == "format" and not self.select: + if value.startswith("ba/b-"): + qual = value + continue + qual = value + if value.startswith("^"): + if "." 
in value or value == "^inf": + value = float(value.split("^")[1]) + else: + value = int(value.split("^")[1]) + elif value.lower() == "true": + value = True + elif value.lower() == "false": + value = False + elif value.startswith(("{", "[", "(")) and value.endswith( + ("}", "]", ")") + ): + value = eval(value) + options[key] = value options["playlist_items"] = "0" - try: - result = await sync_to_async(extract_info, link, options) - except Exception as e: - msg = str(e).replace("<", " ").replace(">", " ") - x = await send_message(message, f"{tag} {msg}") - __run_multi() - await delete_links(message) - await five_minute_del(x) - return None - - __run_multi() - - if not select and (not qual and "format" in options): - qual = options["format"] - - if not qual: - qual = await YtSelection(client, message).get_quality(result) - if qual is None: - return None - await delete_links(message) - LOGGER.info(f"Downloading with YT-DLP: {link}") - playlist = "entries" in result - ydl = YoutubeDLHelper(listener) - await ydl.add_download(link, path, name, qual, playlist, opt) - return None + try: + result = await sync_to_async(extract_info, self.link, options) + except Exception as e: + msg = str(e).replace("<", " ").replace(">", " ") + await send_message(self.message, f"{self.tag} {msg}") + self.rm_from_sm_dir() + return + finally: + self.run_multi(input_list, folder_name, YtDlp) + + if not qual: + qual = await YtSelection(self).get_quality(result) + if qual is None: + self.rm_from_sm_dir() + return + + LOGGER.info(f"Downloading with YT-DLP: {self.link}") + playlist = "entries" in result + ydl = YoutubeDLHelper(self) + await ydl.add_download(path, qual, playlist, opt) async def ytdl(client, message): - _ytdl(client, message) + YtDlp(client, message).new_event() async def ytdlleech(client, message): - _ytdl(client, message, is_leech=True) + YtDlp(client, message, is_leech=True).new_event() bot.add_handler( diff --git a/qBittorrent/config/qBittorrent.conf b/qBittorrent/config/qBittorrent.conf index 192ae94d2..38641a31c 100644 --- a/qBittorrent/config/qBittorrent.conf +++ b/qBittorrent/config/qBittorrent.conf @@ -19,9 +19,9 @@ Session\IgnoreSlowTorrentsForQueueing=true Session\IncludeOverheadInLimits=false Session\LSDEnabled=true Session\MaxActiveCheckingTorrents=3 -Session\MaxActiveDownloads=100 -Session\MaxActiveTorrents=50 -Session\MaxActiveUploads=50 +Session\MaxActiveDownloads=1000 +Session\MaxActiveTorrents=1000 +Session\MaxActiveUploads=1000 Session\MaxConnections=-1 Session\MaxConnectionsPerTorrent=-1 Session\MaxRatioAction=0 @@ -42,7 +42,7 @@ TrackerEnabled=true Accepted=true [Meta] -MigrationVersion=4 +MigrationVersion=6 [Preferences] Advanced\DisableRecursiveDownload=false @@ -51,6 +51,7 @@ Advanced\trackerPortForwarding=true General\PreventFromSuspendWhenDownloading=true General\PreventFromSuspendWhenSeeding=true Search\SearchEnabled=true +WebUI\Address=* WebUI\BanDuration=3600 WebUI\CSRFProtection=false WebUI\ClickjackingProtection=false @@ -58,8 +59,9 @@ WebUI\Enabled=true WebUI\HTTPS\Enabled=false WebUI\HostHeaderValidation=false WebUI\LocalHostAuth=false -WebUI\MaxAuthenticationFailCount=10 +WebUI\MaxAuthenticationFailCount=1000 WebUI\Port=8090 WebUI\SecureCookie=false -WebUI\SessionTimeout=3600 -WebUI\UseUPnP=false \ No newline at end of file +WebUI\UseUPnP=false +WebUI\Username=mltb +WebUI\Password_PBKDF2="@ByteArray(yIpb28Jvi2Eefa6QN9Vp4A==:0wDVw0BHXUAyEHjTkqv+m/OuYmm0JtjmonLa4+qasIvdh5QAZ/z0rZWuRuxI3ikNqMLofakj3lrU/+v1wsipGA==)" \ No newline at end of file diff --git a/ruff.toml b/ruff.toml 
index b4c83b924..33a232f7c 100644 --- a/ruff.toml +++ b/ruff.toml @@ -20,7 +20,6 @@ select = [ "W", # pycodestyle: warning "UP", # pyupgrade "F", # pyflakes - "N", "SIM", # flake8-simplify "RET", # flake8-return "C4", # flake8-comprehensions @@ -31,9 +30,14 @@ select = [ "FA", # flake8-future-annotations "PL", # pylint "ARG", # flake8-unused-arguments + "N", # Naming ] ignore = [ "E501", "PLR2004", + "N806", + "PLR0913", + "PLR0915", + "PLR0912", ] diff --git a/update.py b/update.py index 3d8028046..be2d20b2b 100644 --- a/update.py +++ b/update.py @@ -1,5 +1,4 @@ -import sys -from os import path, environ +from os import path, remove, environ from logging import ( INFO, ERROR, @@ -11,27 +10,27 @@ getLogger, basicConfig, ) +from datetime import datetime from subprocess import run -from dotenv import load_dotenv -from pymongo import MongoClient -from requests import get - -getLogger("pymongo").setLevel(ERROR) -getLogger("httpx").setLevel(ERROR) - -if path.exists("log.txt"): - with open("log.txt", "r+") as f: - f.truncate(0) +from pytz import timezone +from dotenv import load_dotenv, dotenv_values +from pymongo.server_api import ServerApi +from pymongo.mongo_client import MongoClient class CustomFormatter(Formatter): + def formatTime(self, record, datefmt): + dt = datetime.fromtimestamp(record.created, tz=timezone("Asia/Dhaka")) + return dt.strftime(datefmt) + def format(self, record): return super().format(record).replace(record.levelname, record.levelname[:1]) formatter = CustomFormatter( - "[%(asctime)s] [%(levelname)s] - %(message)s", datefmt="%d-%b-%y %I:%M:%S %p" + "[%(asctime)s] [%(levelname)s] %(message)s | [%(module)s:%(lineno)d]", + datefmt="%d-%b %I:%M:%S %p", ) file_handler = FileHandler("log.txt") @@ -42,72 +41,76 @@ def format(self, record): basicConfig(handlers=[file_handler, stream_handler], level=INFO) -CONFIG_FILE_URL = environ.get("CONFIG_FILE_URL") -try: - if len(CONFIG_FILE_URL) == 0: - raise TypeError - try: - res = get(CONFIG_FILE_URL) - if res.status_code == 200: - with open("config.env", "wb+") as f: - f.write(res.content) - else: - error(f"Failed to download config.env {res.status_code}") - except Exception as e: - error(f"CONFIG_FILE_URL: {e}") -except Exception: - pass +getLogger("pymongo").setLevel(ERROR) +getLogger("httpx").setLevel(ERROR) + +if path.exists("log.txt"): + with open("log.txt", "r+") as f: + f.truncate(0) + +if path.exists("rlog.txt"): + remove("rlog.txt") + load_dotenv("config.env", override=True) -BOT_TOKEN = environ.get("BOT_TOKEN", "") -if len(BOT_TOKEN) == 0: - error("BOT_TOKEN variable is missing! 
Exiting now")
-    sys.exit(1)
+BOT_TOKEN = environ["BOT_TOKEN"]

-bot_id = BOT_TOKEN.split(":", 1)[0]
+BOT_ID = BOT_TOKEN.split(":", 1)[0]

 DATABASE_URL = environ.get("DATABASE_URL", "")
 if len(DATABASE_URL) == 0:
     DATABASE_URL = None

-if DATABASE_URL:
-    conn = MongoClient(DATABASE_URL)
-    db = conn.luna
-    if config_dict := db.settings.config.find_one({"_id": bot_id}):
-        environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
-        environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"]
-    conn.close()
+if DATABASE_URL is not None:
+    try:
+        conn = MongoClient(DATABASE_URL, server_api=ServerApi("1"))
+        db = conn.luna
+        old_config = db.settings.deployConfig.find_one({"_id": BOT_ID})
+        config_dict = db.settings.config.find_one({"_id": BOT_ID})
+        if old_config is not None:
+            del old_config["_id"]
+        if (
+            old_config is None
+            or old_config == dict(dotenv_values("config.env"))
+        ) and config_dict is not None:
+            environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
+            environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"]
+        conn.close()
+    except Exception as e:
+        error(f"Database ERROR: {e}")

 UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "")
 if len(UPSTREAM_REPO) == 0:
-    UPSTREAM_REPO = "https://github.com/5hojib/Aeon"
+    UPSTREAM_REPO = None

 UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "")
 if len(UPSTREAM_BRANCH) == 0:
     UPSTREAM_BRANCH = "main"

-if path.exists(".git"):
-    run(["rm", "-rf", ".git"], check=False)
-
-update = run(
-    [
-        f"git init -q \
-        && git config --global user.email yesiamshojib@gmail.com \
-        && git config --global user.name 5hojib \
-        && git add . \
-        && git commit -sm update -q \
-        && git remote add origin {UPSTREAM_REPO} \
-        && git fetch origin -q \
-        && git reset --hard origin/{UPSTREAM_BRANCH} -q"
-    ],
-    shell=True,
-    check=False,
-)
-
-if update.returncode == 0:
-    info("Successfully updated with latest commit from UPSTREAM_REPO")
-else:
-    error(
-        "Something went wrong while updating, check UPSTREAM_REPO if valid or not!"
+if UPSTREAM_REPO is not None:
+    if path.exists(".git"):
+        run(["rm", "-rf", ".git"], check=False)
+
+    update = run(
+        [
+            f"git init -q \
+            && git config --global user.email yesiamshojib@gmail.com \
+            && git config --global user.name 5hojib \
+            && git add . \
+            && git commit -sm update -q \
+            && git remote add origin {UPSTREAM_REPO} \
+            && git fetch origin -q \
+            && git reset --hard origin/{UPSTREAM_BRANCH} -q"
+        ],
+        shell=True,
+        check=False,
+    )
+
+    if update.returncode == 0:
+        info("Successfully updated with latest commit from UPSTREAM_REPO")
+    else:
+        error(
+            "Something went wrong while updating; check that UPSTREAM_REPO is valid!"
+        )
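The deploy-config guard in the hunk above is easier to audit in isolation; a minimal restatement as a pure function (hypothetical name, not part of the patch):

```python
from dotenv import dotenv_values

def should_apply_db_upstream(old_config: dict | None, config_dict: dict | None) -> bool:
    # UPSTREAM_REPO/UPSTREAM_BRANCH stored in the database are applied only
    # when a config document exists and the deployed config.env was never
    # recorded or still matches what was recorded at deploy time.
    deployed = dict(dotenv_values("config.env"))
    return config_dict is not None and (old_config is None or old_config == deployed)
```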
diff --git a/web/nodes.py b/web/nodes.py
index 8a493d2d4..f75bf7d07 100644
--- a/web/nodes.py
+++ b/web/nodes.py
@@ -2,6 +2,8 @@

 from anytree import NodeMixin

+DOWNLOAD_DIR = "/usr/src/app/downloads/"
+

 class TorNode(NodeMixin):
     def __init__(
@@ -23,7 +25,7 @@ def __init__(
         if parent is not None:
             self.parent = parent
         if size is not None:
-            self.size = size
+            self.fsize = size
         if priority is not None:
             self.priority = priority
         if file_id is not None:
@@ -37,13 +39,13 @@ def qb_get_folders(path):


 def get_folders(path):
-    fs = re_findall("/usr/src/app/downloads/[0-9]+/(.+)", path)[0]
+    fs = re_findall(f"{DOWNLOAD_DIR}[0-9]+/(.+)", path)[0]
     return fs.split("/")


-def make_tree(res, aria2=False):
-    parent = TorNode("Torrent")
-    if not aria2:
+def make_tree(res, tool=False):
+    if tool == "qbit":
+        parent = TorNode("Torrent")
         for i in res:
             folders = qb_get_folders(i.name)
             if len(folders) > 1:
@@ -78,7 +80,8 @@
                     file_id=i.id,
                     progress=round(i.progress * 100, 5),
                 )
-    else:
+    elif tool == "aria":
+        parent = TorNode("Torrent")
         for i in res:
             folders = get_folders(i["path"])
             priority = 1
@@ -120,6 +123,24 @@
                     (int(i["completedLength"]) / int(i["length"])) * 100, 5
                 ),
             )
+
+    else:
+        parent = TorNode("Torrent")
+        priority = 1
+        for i in res["files"]:
+            TorNode(
+                i["filename"],
+                is_file=True,
+                parent=parent,
+                size=float(i["mb"]) * 1048576,
+                priority=priority,
+                file_id=i["nzf_id"],
+                progress=round(
+                    ((float(i["mb"]) - float(i["mbleft"])) / float(i["mb"])) * 100,
+                    5,
+                ),
+            )
+
     return create_list(parent, ["", 0])


@@ -127,8 +148,8 @@ def create_list(par, msg):
     if par.name != ".unwanted":
         msg[0] += ""
     for i in par.children:
+        msg[0] += "
- " if i.is_folder: - msg[0] += "
- " if i.name != ".unwanted": msg[0] += ( f' ' @@ -137,14 +158,13 @@ def create_list(par, msg): msg[0] += "
" msg[1] += 1 else: - msg[0] += "- " if i.priority == 0: msg[0] += ( - f' / {i.progress}%' + f' / {i.progress}%' ) else: msg[0] += ( - f' / {i.progress}%' + f' / {i.progress}%' ) msg[0] += ( f'' diff --git a/web/wserver.py b/web/wserver.py index b4b8c0eba..1faa50f86 100644 --- a/web/wserver.py +++ b/web/wserver.py @@ -1,8 +1,9 @@ from time import sleep +from asyncio import get_event_loop from logging import INFO, FileHandler, StreamHandler, getLogger, basicConfig from flask import Flask, request -from aria2p import API +from aria2p import API as ariaAPI from aria2p import Client as ariaClient from qbittorrentapi import Client as qbClient from qbittorrentapi import NotFound404Error @@ -11,7 +12,7 @@ app = Flask(__name__) -aria2 = API(ariaClient(host="http://localhost", port=6800, secret="")) +web_loop = get_event_loop() xnox_client = qbClient( host="localhost", @@ -21,6 +22,8 @@ HTTPADAPTER_ARGS={"pool_maxsize": 200, "pool_block": True}, ) +aria2 = ariaAPI(ariaClient(host="http://localhost", port=6800, secret="")) + basicConfig( format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", handlers=[FileHandler("log.txt"), StreamHandler()], @@ -36,7 +39,7 @@
Torrent File Selector - +
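As a closing note on the qBittorrent.conf hunk earlier: the WebUI password is stored as a PBKDF2 hash, not plain text. A sketch for generating a compatible value, assuming qBittorrent's usual parameters (PBKDF2-HMAC-SHA512, 100,000 iterations, 16-byte salt, 64-byte key; verify against your qBittorrent version):

```python
import base64
import hashlib
import os

def qbit_password_hash(password: str) -> str:
    # Produces the "@ByteArray(salt_b64:dk_b64)" value expected by
    # WebUI\Password_PBKDF2; the parameters are assumptions based on
    # qBittorrent >= 4.2 behaviour.
    salt = os.urandom(16)
    dk = hashlib.pbkdf2_hmac("sha512", password.encode(), salt, 100_000, dklen=64)
    salt_b64 = base64.b64encode(salt).decode()
    dk_b64 = base64.b64encode(dk).decode()
    return f"@ByteArray({salt_b64}:{dk_b64})"

print(qbit_password_hash("mltb"))  # paste the output into qBittorrent.conf
```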