diff --git a/bot/__init__.py b/bot/__init__.py
index 9096f2835..90ea575c4 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -1,3 +1,4 @@
+import subprocess
 from asyncio import Lock, new_event_loop, set_event_loop
 from logging import (
     ERROR,
@@ -10,7 +11,7 @@
 )
 from socket import setdefaulttimeout
 from time import time
-import subprocess
+
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from aria2p import API as ariaAPI
 from aria2p import Client as ariaClient
@@ -111,5 +112,4 @@
         shorteners_list.append({"domain": temp[0], "api_key": temp[1]})
 
-
 scheduler = AsyncIOScheduler(timezone=str(get_localzone()), event_loop=bot_loop)
diff --git a/bot/__main__.py b/bot/__main__.py
index ff745141d..da71ca3af 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,11 +1,10 @@
 from asyncio import gather
 from signal import SIGINT, signal
 
-from .core.config_manager import Config
-
 from . import LOGGER, bot_loop
-from .core.handlers import add_handlers
 from .core.aeon_client import TgClient
+from .core.config_manager import Config
+from .core.handlers import add_handlers
 from .core.startup import (
     load_configurations,
     load_settings,
@@ -25,9 +24,9 @@
     restart_notification,
 )
 
-
 Config.load()
+
 
 async def main():
     await load_settings()
     await gather(TgClient.start_bot(), TgClient.start_user())
diff --git a/bot/core/handlers.py b/bot/core/handlers.py
index 0e72a9d13..d672d365e 100644
--- a/bot/core/handlers.py
+++ b/bot/core/handlers.py
@@ -180,7 +180,7 @@ def add_handlers():
         ),
     )
     TgClient.bot.add_handler(
-        CallbackQueryHandler(rss_listener, filters=regex("^rss"))
+        CallbackQueryHandler(rss_listener, filters=regex("^rss")),
     )
     TgClient.bot.add_handler(
         MessageHandler(
diff --git a/bot/core/startup.py b/bot/core/startup.py
index 72a517fad..f46731832 100644
--- a/bot/core/startup.py
+++ b/bot/core/startup.py
@@ -1,12 +1,11 @@
-from asyncio import create_subprocess_exec, create_subprocess_shell
-
 import subprocess
+from asyncio import create_subprocess_exec, create_subprocess_shell
+from os import environ
 
 from aiofiles import open as aiopen
 from aiofiles.os import makedirs, remove
 from aiofiles.os import path as aiopath
 from aioshutil import rmtree
-from os import environ, getcwd
 
 from bot import (
     LOGGER,
@@ -17,14 +16,14 @@
     extension_filter,
     index_urls,
     qbit_options,
-    xnox_client,
     rss_dict,
     user_data,
+    xnox_client,
 )
 from bot.helper.ext_utils.db_handler import database
 
-from .config_manager import Config
 from .aeon_client import TgClient
+from .config_manager import Config
 
 
 def update_qb_options():
@@ -55,7 +54,7 @@ async def load_settings():
     BOT_ID = Config.BOT_TOKEN.split(":", 1)[0]
     config_file = Config.get_all()
     old_config = await database.db.settings.deployConfig.find_one(
-        {"_id": BOT_ID}
+        {"_id": BOT_ID},
    )
     if old_config is None:
         database.db.settings.deployConfig.replace_one(
@@ -223,9 +222,9 @@
         PORT = environ.get("BASE_URL_PORT") or environ.get("PORT")
         await create_subprocess_shell(
-        f"gunicorn web.wserver:app --bind 0.0.0.0:{PORT} --worker-class gevent",
-    )
-
+            f"gunicorn web.wserver:app --bind 0.0.0.0:{PORT} --worker-class gevent",
+        )
+
     if await aiopath.exists("accounts.zip"):
         if await aiopath.exists("accounts"):
             await rmtree("accounts")
diff --git a/bot/helper/common.py b/bot/helper/common.py
index d85a3401a..b8052f42c 100644
--- a/bot/helper/common.py
+++ b/bot/helper/common.py
@@ -22,8 +22,8 @@
     task_dict_lock,
     user_data,
 )
-from bot.core.config_manager import Config
 from bot.core.aeon_client import TgClient
+from bot.core.config_manager import Config
 
 from .ext_utils.bot_utils import get_size_bytes, new_task, sync_to_async
 from .ext_utils.bulk_links import extract_bulk_links
@@ -165,19 +165,19 @@ async def before_start(self):
             else ["aria2", "!qB"]
         )
         if self.link not in ["rcl", "gdl"]:
-                if is_rclone_path(self.link):
-                    if not self.link.startswith("mrcc:") and self.user_dict.get(
-                        "user_tokens",
-                        False,
-                    ):
-                        self.link = f"mrcc:{self.link}"
-                    await self.is_token_exists(self.link, "dl")
-                elif is_gdrive_link(self.link):
-                    if not self.link.startswith(
-                        ("mtp:", "tp:", "sa:"),
-                    ) and self.user_dict.get("user_tokens", False):
-                        self.link = f"mtp:{self.link}"
-                    await self.is_token_exists(self.link, "dl")
+            if is_rclone_path(self.link):
+                if not self.link.startswith("mrcc:") and self.user_dict.get(
+                    "user_tokens",
+                    False,
+                ):
+                    self.link = f"mrcc:{self.link}"
+                await self.is_token_exists(self.link, "dl")
+            elif is_gdrive_link(self.link):
+                if not self.link.startswith(
+                    ("mtp:", "tp:", "sa:"),
+                ) and self.user_dict.get("user_tokens", False):
+                    self.link = f"mtp:{self.link}"
+                await self.is_token_exists(self.link, "dl")
         elif self.link == "rcl":
             if not self.is_ytdlp:
                 self.link = await RcloneList(self).get_rclone_path("rcd")
@@ -545,7 +545,9 @@ async def init_bulk(self, input_list, bulk_start, bulk_end, obj):
     async def decompress_zst(self, dl_path, is_dir=False):
         if is_dir:
             for dirpath, _, files in await sync_to_async(
-                walk, dl_path, topdown=False
+                walk,
+                dl_path,
+                topdown=False,
             ):
                 for file_ in files:
                     if file_.endswith(".zst"):
@@ -591,7 +593,7 @@ async def decompress_zst(self, dl_path, is_dir=False):
                     except:
                         stderr = "Unable to decode the error!"
                     LOGGER.error(
-                        f"{stderr}. Unable to extract zst file!. Path: {dl_path}"
+                        f"{stderr}. Unable to extract zst file!. Path: {dl_path}",
                     )
         elif not self.seed:
             await remove(dl_path)
@@ -890,7 +892,9 @@ async def generate_sample_video(self, dl_path, gid, unwanted_files, ft_delete):
             return new_folder
         else:
             for dirpath, _, files in await sync_to_async(
-                walk, dl_path, topdown=False
+                walk,
+                dl_path,
+                topdown=False,
             ):
                 for file_ in files:
                     f_path = ospath.join(dirpath, file_)
@@ -1019,7 +1023,9 @@ async def proceed_convert(m_path):
             return output_file
         else:
             for dirpath, _, files in await sync_to_async(
-                walk, dl_path, topdown=False
+                walk,
+                dl_path,
+                topdown=False,
             ):
                 for file_ in files:
                     if self.is_cancelled:
@@ -1068,7 +1074,9 @@ async def generate_screenshots(self, dl_path):
         else:
             LOGGER.info(f"Creating Screenshot for: {dl_path}")
             for dirpath, _, files in await sync_to_async(
-                walk, dl_path, topdown=False
+                walk,
+                dl_path,
+                topdown=False,
             ):
                 for file_ in files:
                     f_path = ospath.join(dirpath, file_)
@@ -1094,7 +1102,10 @@ async def substitute(self, dl_path):
                     res = ""
                 try:
                     name = sub(
-                        rf"{pattern}", res, name, flags=IGNORECASE if sen else 0
+                        rf"{pattern}",
+                        res,
+                        name,
+                        flags=IGNORECASE if sen else 0,
                     )
                 except Exception as e:
                     LOGGER.error(
@@ -1213,7 +1224,9 @@ async def proceed_ffmpeg(self, dl_path, gid):
                     checked = True
                     async with task_dict_lock:
                         task_dict[self.mid] = FFmpegStatus(
-                            self, gid, "FFmpeg"
+                            self,
+                            gid,
+                            "FFmpeg",
                         )
                     await cpu_eater_lock.acquire()
                 LOGGER.info(f"Running ffmpeg cmd for: {f_path}")
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
index 2f884790b..b41bfe47f 100644
--- a/bot/helper/ext_utils/db_handler.py
+++ b/bot/helper/ext_utils/db_handler.py
@@ -7,8 +7,8 @@
 from pymongo.server_api import ServerApi
 
 from bot import LOGGER, qbit_options, rss_dict, user_data
-from bot.core.config_manager import Config
 from bot.core.aeon_client import TgClient
+from bot.core.config_manager import Config
 
 
 class DbManager:
diff --git a/bot/helper/ext_utils/files_utils.py b/bot/helper/ext_utils/files_utils.py
index fc3146b06..c687fc3c4 100644
--- a/bot/helper/ext_utils/files_utils.py
+++ b/bot/helper/ext_utils/files_utils.py
@@ -174,7 +174,8 @@ async def count_files_and_folders(path, extension_filter, unwanted_files=None):
 
 def get_base_name(orig_path):
     extension = next(
-        (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), ""
+        (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)),
+        "",
     )
     if extension != "":
         return re_split(f"{extension}$", orig_path, maxsplit=1, flags=IGNORECASE)[0]
diff --git a/bot/helper/ext_utils/media_utils.py b/bot/helper/ext_utils/media_utils.py
index ab36ffb13..e49a5f209 100644
--- a/bot/helper/ext_utils/media_utils.py
+++ b/bot/helper/ext_utils/media_utils.py
@@ -154,7 +154,7 @@ async def is_multi_streams(path):
         )
     except Exception as e:
         LOGGER.error(
-            f"Get Video Streams: {e}. Mostly File not found! - File: {path}"
+            f"Get Video Streams: {e}. Mostly File not found! - File: {path}",
         )
         return False
     if result[0] and result[2] == 0:
@@ -230,12 +230,12 @@ async def get_document_type(path):
             is_video = True
     except Exception as e:
         LOGGER.error(
-            f"Get Document Type: {e}. Mostly File not found! - File: {path}"
+            f"Get Document Type: {e}. Mostly File not found! - File: {path}",
         )
     if mime_type.startswith("audio"):
         return False, True, False
     if not mime_type.startswith("video") and not mime_type.endswith(
-        "octet-stream"
+        "octet-stream",
     ):
         return is_video, is_audio, is_image
     if mime_type.startswith("video"):
diff --git a/bot/helper/listeners/aria2_listener.py b/bot/helper/listeners/aria2_listener.py
index a1c834462..876d9e9cd 100644
--- a/bot/helper/listeners/aria2_listener.py
+++ b/bot/helper/listeners/aria2_listener.py
@@ -151,7 +151,10 @@ async def _on_bt_download_complete(api, gid):
     async with task_dict_lock:
         if task.listener.mid not in task_dict:
             await sync_to_async(
-                api.remove, [download], force=True, files=True
+                api.remove,
+                [download],
+                force=True,
+                files=True,
             )
             return
         task_dict[task.listener.mid] = Aria2Status(task.listener, gid, True)
diff --git a/bot/helper/listeners/qbit_listener.py b/bot/helper/listeners/qbit_listener.py
index b8c0bad92..ffabc848b 100644
--- a/bot/helper/listeners/qbit_listener.py
+++ b/bot/helper/listeners/qbit_listener.py
@@ -10,9 +10,9 @@
     intervals,
     qb_listener_lock,
     qb_torrents,
-    xnox_client,
     task_dict,
     task_dict_lock,
+    xnox_client,
 )
 from bot.core.config_manager import Config
 from bot.helper.ext_utils.bot_utils import new_task, sync_to_async
@@ -61,7 +61,8 @@ async def _stop_duplicate(tor):
     if task := await get_task_by_gid(tor.hash[:12]):
         if task.listener.stop_duplicate:
             task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(
-                ".!qB", 1
+                ".!qB",
+                1,
             )[0]
             msg, button = await stop_duplicate_check(task.listener)
             if msg:
diff --git a/bot/helper/listeners/task_listener.py b/bot/helper/listeners/task_listener.py
index 0048eaf7e..fd29f2ca0 100644
--- a/bot/helper/listeners/task_listener.py
+++ b/bot/helper/listeners/task_listener.py
@@ -109,12 +109,12 @@ async def on_download_complete(self):
                     self.same_dir[self.folder_name]["total"] -= 1
                     spath = f"{self.dir}{self.folder_name}"
                     des_id = next(
-                        iter(self.same_dir[self.folder_name]["tasks"])
+                        iter(self.same_dir[self.folder_name]["tasks"]),
                     )
                     des_path = f"{Config.DOWNLOAD_DIR}{des_id}{self.folder_name}"
                     await makedirs(des_path, exist_ok=True)
                     LOGGER.info(
-                        f"Moving files from {self.mid} to {des_id}"
+                        f"Moving files from {self.mid} to {des_id}",
                     )
                     for item in await listdir(spath):
                         if item.endswith((".aria2", ".!qB")):
@@ -245,7 +245,10 @@ async def on_download_complete(self):
 
         if self.is_leech and not self.compress:
             await self.proceed_split(
-                up_dir, unwanted_files_size, unwanted_files, gid
+                up_dir,
+                unwanted_files_size,
+                unwanted_files,
+                gid,
             )
             if self.is_cancelled:
                 return
diff --git a/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py
index 5e4dad613..91b17f4aa 100644
--- a/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py
+++ b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py
@@ -238,7 +238,7 @@ def _repair_download(url, session):
             return mediafire(f"https://mediafire.com/{new_link[0]}")
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if session is None:
@@ -264,7 +264,7 @@ def _repair_download(url, session):
     except Exception as e:
         session.close()
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if html.xpath("//div[@class='passwordPrompt']"):
         session.close()
@@ -290,7 +290,7 @@ def osdn(url):
         html = HTML(session.get(url).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if not (direct_link := html.xapth('//a[@class="mirror_link"]/@href')):
         raise DirectDownloadLinkException("ERROR: Direct link not found")
@@ -349,7 +349,7 @@ def hxfile(url):
         )
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if direct_link := html.xpath("//a[@class='btn btn-dow']/@href"):
         header = f"Referer: {url}"
@@ -367,7 +367,7 @@ def onedrive(link):
         link_data = parse_qs(parsed_link.query)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if not link_data:
         raise DirectDownloadLinkException("ERROR: Unable to find link_data")
@@ -390,7 +390,7 @@ def onedrive(link):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if "@content.downloadUrl" not in resp:
         raise DirectDownloadLinkException("ERROR: Direct link not found")
@@ -412,7 +412,7 @@ def pixeldrain(url):
             resp = session.get(info_link).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if resp["success"]:
         return dl_link
@@ -447,7 +447,7 @@ def racaty(url):
             html = HTML(session.post(url, data=json_data).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if direct_link := html.xpath("//a[@id='uniqueExpirylink']/@href"):
         return direct_link[0]
@@ -541,7 +541,7 @@ def solidfiles(url):
             return loads(mainOptions)["downloadUrl"]
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
@@ -551,7 +551,7 @@ def krakenfiles(url):
         _res = session.get(url)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     html = HTML(_res.text)
     if post_url := html.xpath('//form[@id="dl-form"]/@action'):
@@ -562,7 +562,7 @@ def krakenfiles(url):
         data = {"token": token[0]}
     else:
         raise DirectDownloadLinkException(
-            "ERROR: Unable to find token for post."
+            "ERROR: Unable to find token for post.",
         )
     try:
         _json = session.post(post_url, data=data).json()
@@ -583,7 +583,7 @@ def uploadee(url):
         html = HTML(session.get(url).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if link := html.xpath("//a[@id='d_l']/@href"):
         return link[0]
@@ -632,7 +632,7 @@ def terabox(url, video_quality="HD Video", save_dir="HD_Video"):
             break
     except RequestException as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     else:
         raise DirectDownloadLinkException("ERROR: Unable to fetch the JSON data")
@@ -689,7 +689,7 @@ def filepress(url):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if "data" not in res:
         raise DirectDownloadLinkException(f'ERROR: {res["statusText"]}')
@@ -715,7 +715,7 @@ def gdtot(url):
         )
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if (
         drive_link := findall(r"myDl\('(.*?)'\)", res.text)
@@ -774,7 +774,11 @@ def sharer_scraper(url):
     )
     try:
         res = cget(
-            "POST", url, cookies=res.cookies, headers=headers, data=data
+            "POST",
+            url,
+            cookies=res.cookies,
+            headers=headers,
+            data=data,
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
@@ -812,7 +816,7 @@ def wetransfer(url):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if "direct_link" in res:
         return res["direct_link"]
@@ -834,7 +838,7 @@ def akmfiles(url):
         )
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if direct_link := html.xpath("//a[contains(@class,'btn btn-dow')]/@href"):
         return direct_link[0]
@@ -849,7 +853,7 @@ def shrdsk(url):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if "download_data" not in _json:
         raise DirectDownloadLinkException("ERROR: Download data not found")
@@ -862,7 +866,7 @@ def shrdsk(url):
             return _res.headers["Location"]
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     raise DirectDownloadLinkException("ERROR: cannot find direct link in headers")
@@ -884,7 +888,7 @@ def __singleItem(session, itemId):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     data = _json["data"]
     if not data:
@@ -925,7 +929,7 @@ def __fetch_links(session, _id=0, folderPath=""):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     data = _json["data"]
     if not data:
@@ -1283,7 +1287,7 @@ def send_cm_file(url, file_id=None):
             return (_res.headers["Location"], "Referer: https://send.cm/")
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if _passwordNeed:
         raise DirectDownloadLinkException(
@@ -1319,7 +1323,9 @@ def __collectFolders(html):
         folders_urls = html.xpath("//h6/a/@href")
         folders_names = html.xpath("//h6/a/text()")
         for folders_url, folders_name in zip(
-            folders_urls, folders_names, strict=False
+            folders_urls,
+            folders_names,
+            strict=False,
         ):
             folders.append(
                 {
@@ -1347,7 +1353,10 @@ def __getFiles(html):
         file_names = html.xpath('//tr[@class="selectable"]//a/text()')
         sizes = html.xpath('//tr[@class="selectable"]//span/text()')
         for href, file_name, size_text in zip(
-            hrefs, file_names, sizes, strict=False
+            hrefs,
+            file_names,
+            sizes,
+            strict=False,
         ):
             files.append(
                 {
@@ -1478,7 +1487,7 @@ def easyupload(url):
         json_resp = session.post(url=action_url, data=data).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if "download_link" in json_resp:
         return json_resp["download_link"]
@@ -1540,7 +1549,7 @@ def filelions_and_streamwish(url):
         ).json()
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if _res["status"] != 200:
         raise DirectDownloadLinkException(f"ERROR: {_res['msg']}")
@@ -1573,7 +1582,7 @@ def streamvid(url: str):
         html = HTML(session.get(url).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if quality_defined:
         data = {}
@@ -1626,7 +1635,7 @@ def streamhub(url):
         html = HTML(session.get(url).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if not (inputs := html.xpath('//form[@name="F1"]//input')):
         raise DirectDownloadLinkException("ERROR: No inputs found")
@@ -1640,7 +1649,7 @@ def streamhub(url):
         html = HTML(session.post(url, data=data).text)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if directLink := html.xpath(
         '//a[@class="btn btn-primary btn-go downloadbtn"]/@href',
     ):
@@ -1657,7 +1666,7 @@ def pcloud(url):
         res = session.get(url)
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     if link := findall(r".downloadlink.:..(https:.*)..", res.text):
         return link[0].replace(r"\/", "/")
@@ -1687,7 +1696,7 @@ def qiwi(url):
         res = session.get(url).text
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     tree = HTML(res)
     if name := tree.xpath('//h1[@class="page_TextHeading__VsM7r"]/text()'):
@@ -1738,7 +1747,7 @@ def berkasdrive(url):
         sesi = session.get(url).text
     except Exception as e:
         raise DirectDownloadLinkException(
-            f"ERROR: {e.__class__.__name__}"
+            f"ERROR: {e.__class__.__name__}",
         ) from e
     html = HTML(sesi)
     if link := html.xpath("//script")[0].text.split('"')[1]:
diff --git a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py
index 80f7c36df..a0f72e2ea 100644
--- a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py
@@ -3,7 +3,7 @@
 from aiofiles.os import path as aiopath
 from aiofiles.os import remove
 
-from bot import LOGGER, xnox_client, task_dict, task_dict_lock
+from bot import LOGGER, task_dict, task_dict_lock, xnox_client
 from bot.core.config_manager import Config
 from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async
 from bot.helper.ext_utils.task_manager import check_running_tasks
@@ -79,13 +79,14 @@ async def add_qb_torrent(listener, path, ratio, seed_time):
 
     async with task_dict_lock:
         task_dict[listener.mid] = QbittorrentStatus(
-            listener, queued=add_to_queue
+            listener,
+            queued=add_to_queue,
         )
     await on_download_start(f"{listener.mid}")
 
     if add_to_queue:
         LOGGER.info(
-            f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}"
+            f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}",
         )
     else:
         LOGGER.info(f"QbitDownload started: {tor_info.name} - Hash: {ext_hash}")
diff --git a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
index 72f0faa79..3cd39fe0f 100644
--- a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py
@@ -51,7 +51,7 @@ async def _on_download_start(self, file_id, from_queue):
             LOGGER.info(f"Download from Telegram: {self._listener.name}")
         else:
             LOGGER.info(
-                f"Start Queued Download from Telegram: {self._listener.name}"
+                f"Start Queued Download from Telegram: {self._listener.name}",
             )
 
     async def _on_download_progress(self, current, _):
diff --git a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py
index 2ccbe9efb..ebcec1e58 100644
--- a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py
+++ b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py
@@ -128,7 +128,9 @@ def _on_download_progress(self, d):
     async def _on_download_start(self, from_queue=False):
         async with task_dict_lock:
             task_dict[self._listener.mid] = YtDlpStatus(
-                self._listener, self, self._gid
+                self._listener,
+                self,
+                self._gid,
             )
         if not from_queue:
             await self._listener.on_download_start()
@@ -366,7 +368,7 @@ def _set_options(self, options):
             elif value.lower() == "false":
                 value = False
             elif value.startswith(("{", "[", "(")) and value.endswith(
-                ("}", "]", ")")
+                ("}", "]", ")"),
             ):
                 value = eval(value)
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/clone.py b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py
index 45e0b4e48..3c2d5e128 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/clone.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py
@@ -36,7 +36,7 @@ def user_setting(self):
             self.listener.up_dest = self.listener.up_dest.replace("tp:", "", 1)
             self.use_sa = False
         elif self.listener.up_dest.startswith(
-            "sa:"
+            "sa:",
         ) or self.listener.link.startswith(
             "sa:",
         ):
@@ -62,7 +62,8 @@ def clone(self):
             mime_type = meta.get("mimeType")
             if mime_type == self.G_DRIVE_DIR_MIME_TYPE:
                 dir_id = self.create_directory(
-                    meta.get("name"), self.listener.up_dest
+                    meta.get("name"),
+                    self.listener.up_dest,
                 )
                 self._clone_folder(meta.get("name"), meta.get("id"), dir_id)
                 durl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/download.py b/bot/helper/mirror_leech_utils/gdrive_utils/download.py
index cbc2560c8..edf045d69 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/download.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/download.py
@@ -89,7 +89,7 @@ def _download_folder(self, folder_id, path, folder_name):
             elif not ospath.isfile(
                 f"{path}{filename}",
             ) and not filename.lower().endswith(
-                tuple(self.listener.extension_filter)
+                tuple(self.listener.extension_filter),
             ):
                 self._download_file(file_id, path, filename, mime_type)
             if self.listener.is_cancelled:
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/helper.py b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py
index 61a51348c..1622dc3b4 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/helper.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py
@@ -82,7 +82,7 @@ def authorize(self):
             self.sa_number = len(json_files)
             self.sa_index = randrange(self.sa_number)
             LOGGER.info(
-                f"Authorizing with {json_files[self.sa_index]} service account"
+                f"Authorizing with {json_files[self.sa_index]} service account",
             )
             credentials = service_account.Credentials.from_service_account_file(
                 f"accounts/{json_files[self.sa_index]}",
@@ -220,7 +220,7 @@ def create_directory(self, directory_name, dest_id):
         if not Config.IS_TEAM_DRIVE:
             self.set_permission(file_id)
         LOGGER.info(
-            f'Created G-Drive Folder:\nName: {file.get("name")}\nID: {file_id}'
+            f'Created G-Drive Folder:\nName: {file.get("name")}\nID: {file_id}',
         )
         return file_id
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/list.py b/bot/helper/mirror_leech_utils/gdrive_utils/list.py
index 8226b34cc..4f2684747 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/list.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/list.py
@@ -161,7 +161,9 @@ async def _send_list_message(self, msg, button):
         if not self.listener.is_cancelled:
             if self._reply_to is None:
                 self._reply_to = await send_message(
-                    self.listener.message, msg, button
+                    self.listener.message,
+                    msg,
+                    button,
                 )
             else:
                 await edit_message(self._reply_to, msg, button)
@@ -198,7 +200,9 @@ async def get_items_buttons(self):
             buttons.data_button("Files", "gdq itype files", position="footer")
         else:
             buttons.data_button(
-                "Folders", "gdq itype folders", position="footer"
+                "Folders",
+                "gdq itype folders",
+                position="footer",
             )
         if self.list_status == "gdu" or len(self.items_list) > 0:
             buttons.data_button("Choose Current Path", "gdq cur", position="footer")
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/search.py b/bot/helper/mirror_leech_utils/gdrive_utils/search.py
index 673139a5d..b659609b8 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/search.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/search.py
@@ -9,7 +9,11 @@
 class GoogleDriveSearch(GoogleDriveHelper):
     def __init__(
-        self, stop_dup=False, no_multi=False, is_recursive=True, item_type=""
+        self,
+        stop_dup=False,
+        no_multi=False,
+        is_recursive=True,
+        item_type="",
     ):
         super().__init__()
         self._stop_dup = stop_dup
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/upload.py b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py
index 064859a8f..2d2b769bb 100644
--- a/bot/helper/mirror_leech_utils/gdrive_utils/upload.py
+++ b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py
@@ -50,7 +50,7 @@ def upload(self, unwanted_files, ft_delete):
         try:
             if ospath.isfile(self._path):
                 if self._path.lower().endswith(
-                    tuple(self.listener.extension_filter)
+                    tuple(self.listener.extension_filter),
                 ):
                     raise Exception(
                         "This file extension is excluded by extension filter!",
@@ -76,7 +76,10 @@ def upload(self, unwanted_files, ft_delete):
                     self.listener.up_dest,
                 )
                 result = self._upload_dir(
-                    self._path, dir_id, unwanted_files, ft_delete
+                    self._path,
+                    dir_id,
+                    unwanted_files,
+                    ft_delete,
                 )
                 if result is None:
                     raise Exception("Upload has been manually cancelled!")
@@ -178,7 +181,9 @@ def _upload_file(
         if ospath.getsize(file_path) == 0:
             media_body = MediaFileUpload(
-                file_path, mimetype=mime_type, resumable=False
+                file_path,
+                mimetype=mime_type,
+                resumable=False,
             )
             response = (
                 self.service.files()
diff --git a/bot/helper/mirror_leech_utils/rclone_utils/list.py b/bot/helper/mirror_leech_utils/rclone_utils/list.py
index ab1627832..252f6f57c 100644
--- a/bot/helper/mirror_leech_utils/rclone_utils/list.py
+++ b/bot/helper/mirror_leech_utils/rclone_utils/list.py
@@ -181,7 +181,9 @@ async def _send_list_message(self, msg, button):
         if not self.listener.is_cancelled:
             if self._reply_to is None:
                 self._reply_to = await send_message(
-                    self.listener.message, msg, button
+                    self.listener.message,
+                    msg,
+                    button,
                 )
             else:
                 await edit_message(self._reply_to, msg, button)
diff --git a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
index 3c73fa557..2dc3a4963 100644
--- a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
+++ b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py
@@ -361,7 +361,7 @@ async def upload(self, path, unwanted_files, ft_delete):
             and not self._listener.rc_flags
         ):
             cmd.extend(
-                ("--drive-chunk-size", "128M", "--drive-upload-cutoff", "128M")
+                ("--drive-chunk-size", "128M", "--drive-upload-cutoff", "128M"),
             )
 
         result = await self._start_upload(cmd, remote_type)
@@ -403,7 +403,7 @@ async def upload(self, path, unwanted_files, ft_delete):
                 if not err:
                     err = "Use /shell cat rlog.txt to see more information"
                 LOGGER.error(
-                    f"while getting link. Path: {destination} | Stderr: {err}"
+                    f"while getting link. Path: {destination} | Stderr: {err}",
                 )
                 link = ""
         if self._listener.is_cancelled:
diff --git a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
index dd968420d..0a786e16c 100644
--- a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
+++ b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py
@@ -95,7 +95,10 @@ async def cancel_task(self):
                 f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}",
             )
             await sync_to_async(
-                aria2.remove, [self._download], force=True, files=True
+                aria2.remove,
+                [self._download],
+                force=True,
+                files=True,
             )
         elif downloads := self._download.followed_by:
             LOGGER.info(f"Cancelling Download: {self.name()}")
@@ -111,5 +114,8 @@
             msg = "Download stopped by user!"
         await self.listener.on_download_error(msg)
         await sync_to_async(
-            aria2.remove, [self._download], force=True, files=True
+            aria2.remove,
+            [self._download],
+            force=True,
+            files=True,
         )
diff --git a/bot/helper/mirror_leech_utils/telegram_uploader.py b/bot/helper/mirror_leech_utils/telegram_uploader.py
index f82259f36..6f843ffdf 100644
--- a/bot/helper/mirror_leech_utils/telegram_uploader.py
+++ b/bot/helper/mirror_leech_utils/telegram_uploader.py
@@ -31,8 +31,8 @@
     wait_exponential,
 )
 
-from bot.core.config_manager import Config
 from bot.core.aeon_client import TgClient
+from bot.core.config_manager import Config
 from bot.helper.ext_utils.bot_utils import sync_to_async
 from bot.helper.ext_utils.files_utils import (
     clean_unwanted,
@@ -166,7 +166,8 @@ async def _prepare_file(self, file_, dirpath, delete_file):
             name = get_base_name(file_)
             ext = file_.split(name, 1)[1]
         elif match := re_match(
-            r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+$)", file_
+            r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+$)",
+            file_,
         ):
             name = match.group(0)
             ext = file_.split(name, 1)[1]
@@ -294,7 +295,8 @@ async def upload(self, o_files, ft_delete):
                             x for v in self._media_dict.values() for x in v
                         ]
                         match = re_match(
-                            r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", f_path
+                            r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)",
+                            f_path,
                         )
                         if not match or (
                             match and match.group(0) not in group_lists
@@ -303,7 +305,9 @@
                         ):
                             for subkey, msgs in list(value.items()):
                                 if len(msgs) > 1:
                                     await self._send_media_group(
-                                        subkey, key, msgs
+                                        subkey,
+                                        key,
+                                        msgs,
                                     )
                 if (
                     self._listener.mixed_leech
@@ -377,7 +381,7 @@
             return
         if self._total_files <= self._corrupted:
             await self._listener.on_upload_error(
-                f"Files Corrupted or unable to upload. {self._error or 'Check logs!'}"
+                f"Files Corrupted or unable to upload. {self._error or 'Check logs!'}",
             )
             return
         LOGGER.info(f"Leech Completed: {self._listener.name}")
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index fe73ea309..d6611a183 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -5,8 +5,8 @@
 from pyrogram.errors import FloodPremiumWait, FloodWait
 
 from bot import LOGGER, intervals, status_dict, task_dict_lock
-from bot.core.config_manager import Config
 from bot.core.aeon_client import TgClient
+from bot.core.config_manager import Config
 from bot.helper.ext_utils.bot_utils import SetInterval
 from bot.helper.ext_utils.exceptions import TgLinkException
 from bot.helper.ext_utils.status_utils import get_readable_message
@@ -124,7 +124,7 @@ async def get_tg_link_message(link):
             )
         if not TgClient.user:
             raise TgLinkException(
-                "USER_SESSION_STRING required for this private link!"
+ "USER_SESSION_STRING required for this private link!", ) chat = msg[1] @@ -155,7 +155,8 @@ async def get_tg_link_message(link): if not private: try: message = await TgClient.bot.get_messages( - chat_id=chat, message_ids=msg_id + chat_id=chat, + message_ids=msg_id, ) if message.empty: private = True diff --git a/bot/modules/bot_settings.py b/bot/modules/bot_settings.py index 9372d72a6..a1fa69edc 100644 --- a/bot/modules/bot_settings.py +++ b/bot/modules/bot_settings.py @@ -25,19 +25,13 @@ extension_filter, index_urls, intervals, - jd_lock, - nzb_options, - qbit_options, - xnox_client, - sabnzbd_client, task_dict, ) -from bot.core.config_manager import Config from bot.core.aeon_client import TgClient -from bot.core.startup import update_nzb_options, update_qb_options, update_variables +from bot.core.config_manager import Config +from bot.core.startup import update_variables from bot.helper.ext_utils.bot_utils import SetInterval, new_task, sync_to_async from bot.helper.ext_utils.db_handler import database -from bot.helper.ext_utils.jdownloader_booter import jdownloader from bot.helper.ext_utils.task_manager import start_from_queued from bot.helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter from bot.helper.telegram_helper.button_build import ButtonMaker diff --git a/bot/modules/cancel_task.py b/bot/modules/cancel_task.py index 3b7fab2e0..77533b133 100644 --- a/bot/modules/cancel_task.py +++ b/bot/modules/cancel_task.py @@ -94,10 +94,12 @@ def create_cancel_buttons(is_sudo, user_id=""): ) buttons.data_button("Seeding", f"canall ms {MirrorStatus.STATUS_SEED} {user_id}") buttons.data_button( - "Spltting", f"canall ms {MirrorStatus.STATUS_SPLIT} {user_id}" + "Spltting", + f"canall ms {MirrorStatus.STATUS_SPLIT} {user_id}", ) buttons.data_button( - "Cloning", f"canall ms {MirrorStatus.STATUS_CLONE} {user_id}" + "Cloning", + f"canall ms {MirrorStatus.STATUS_CLONE} {user_id}", ) buttons.data_button( "Extracting", @@ -124,10 +126,12 @@ def create_cancel_buttons(is_sudo, user_id=""): f"canall ms {MirrorStatus.STATUS_CONVERT} {user_id}", ) buttons.data_button( - "FFmpeg", f"canall ms {MirrorStatus.STATUS_FFMPEG} {user_id}" + "FFmpeg", + f"canall ms {MirrorStatus.STATUS_FFMPEG} {user_id}", ) buttons.data_button( - "Paused", f"canall ms {MirrorStatus.STATUS_PAUSED} {user_id}" + "Paused", + f"canall ms {MirrorStatus.STATUS_PAUSED} {user_id}", ) buttons.data_button("All", f"canall ms All {user_id}") if is_sudo: diff --git a/bot/modules/clone.py b/bot/modules/clone.py index af1414dec..8cee9ce5c 100644 --- a/bot/modules/clone.py +++ b/bot/modules/clone.py @@ -165,7 +165,7 @@ async def _proceed_to_clone(self, sync): if self.multi <= 1: await send_status_message(self.message) flink, mime_type, files, folders, dir_id = await sync_to_async( - drive.clone + drive.clone, ) if msg: await delete_message(msg) diff --git a/bot/modules/file_selector.py b/bot/modules/file_selector.py index 6cd5890b3..b6c4ffc61 100644 --- a/bot/modules/file_selector.py +++ b/bot/modules/file_selector.py @@ -6,10 +6,10 @@ from bot import ( LOGGER, aria2, - xnox_client, task_dict, task_dict_lock, user_data, + xnox_client, ) from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import ( diff --git a/bot/modules/help.py b/bot/modules/help.py index 104768da2..94fca8b49 100644 --- a/bot/modules/help.py +++ b/bot/modules/help.py @@ -28,7 +28,9 @@ async def arg_usage(_, query): ) elif data[2] == "y": await edit_message( - message, COMMAND_USAGE["yt"][0], COMMAND_USAGE["yt"][1] + message, + 
COMMAND_USAGE["yt"][0], + COMMAND_USAGE["yt"][1], ) elif data[2] == "c": await edit_message( diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py index 9cfed6dae..3936f57ca 100644 --- a/bot/modules/mirror_leech.py +++ b/bot/modules/mirror_leech.py @@ -195,7 +195,7 @@ async def new_event(self): self.folder_name: { "total": self.multi, "tasks": {self.mid}, - } + }, } elif self.same_dir: async with task_dict_lock: @@ -318,7 +318,8 @@ async def new_event(self): ): content_type = await get_content_type(self.link) if content_type is None or re_match( - r"text/html|text/plain", content_type + r"text/html|text/plain", + content_type, ): try: self.link = await sync_to_async(direct_link_generator, self.link) diff --git a/bot/modules/restart.py b/bot/modules/restart.py index 7bb85d1cc..809256aee 100644 --- a/bot/modules/restart.py +++ b/bot/modules/restart.py @@ -8,8 +8,8 @@ from aiofiles.os import remove from bot import LOGGER, intervals, scheduler -from bot.core.config_manager import Config from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import new_task, sync_to_async from bot.helper.ext_utils.db_handler import database from bot.helper.ext_utils.files_utils import clean_all diff --git a/bot/modules/rss.py b/bot/modules/rss.py index f6fb26b7e..aff154394 100644 --- a/bot/modules/rss.py +++ b/bot/modules/rss.py @@ -460,7 +460,8 @@ async def event_filter(_, __, event): ) handler = client.add_handler( - MessageHandler(pfunc, create(event_filter)), group=-1 + MessageHandler(pfunc, create(event_filter)), + group=-1, ) while handler_dict[user_id]: await sleep(0.5) diff --git a/bot/modules/search.py b/bot/modules/search.py index 6f8dbefb6..4acb6ae89 100644 --- a/bot/modules/search.py +++ b/bot/modules/search.py @@ -24,7 +24,8 @@ async def initiate_search_tools(): if qb_plugins: names = [plugin["name"] for plugin in qb_plugins] await sync_to_async( - xnox_client.search_uninstall_plugin, names=names + xnox_client.search_uninstall_plugin, + names=names, ) await sync_to_async( xnox_client.search_install_plugin, diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py index b2efbca17..424c93c0b 100644 --- a/bot/modules/users_settings.py +++ b/bot/modules/users_settings.py @@ -11,8 +11,8 @@ from pyrogram.handlers import MessageHandler from bot import extension_filter, user_data -from bot.core.config_manager import Config from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import ( get_size_bytes, new_task, @@ -684,7 +684,8 @@ async def edit_user_settings(client, query): buttons = ButtonMaker() if user_dict.get("rclone_path", False): buttons.data_button( - "Reset Rclone Path", f"userset {user_id} rclone_path" + "Reset Rclone Path", + f"userset {user_id} rclone_path", ) buttons.data_button("Back", f"userset {user_id} rclone") buttons.data_button("Close", f"userset {user_id} close") @@ -809,7 +810,8 @@ async def edit_user_settings(client, query): buttons = ButtonMaker() if user_dict.get("name_sub", False): buttons.data_button( - "Remove Name Subtitute", f"userset {user_id} name_sub" + "Remove Name Subtitute", + f"userset {user_id} name_sub", ) buttons.data_button("Back", f"userset {user_id} back") buttons.data_button("Close", f"userset {user_id} close") diff --git a/update.py b/update.py index bba64a0fe..0aae58907 100644 --- a/update.py +++ b/update.py @@ -67,7 +67,9 @@ except Exception as e: log_error(f"Database ERROR: {e}") 
-UPSTREAM_REPO = config_file.get("UPSTREAM_REPO", "https://github.com/AeonOrg/Aeon-MLTB").strip()
+UPSTREAM_REPO = config_file.get(
+    "UPSTREAM_REPO", "https://github.com/AeonOrg/Aeon-MLTB"
+).strip()
 UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "").strip() or "beta"
diff --git a/web/nodes.py b/web/nodes.py
index 9eb69c0a0..68e5b2d1c 100644
--- a/web/nodes.py
+++ b/web/nodes.py
@@ -112,7 +112,8 @@ def make_tree(res, tool=False):
             previous_node = current_node
         try:
             progress = round(
-                (int(i["completedLength"]) / int(i["length"])) * 100, 5
+                (int(i["completedLength"]) / int(i["length"])) * 100,
+                5,
             )
         except:
             progress = 0