diff --git a/.gitignore b/.gitignore
index 925b8ce2ff..f74b121a72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,21 +1,22 @@
+# Files and directories ignored by version control
+# (.gitignore is a plain list of patterns, one per line)
 config.env
 *.pyc
 data*
 .vscode
 .idea
 *.json
 *.pickle
 .netrc
 log.txt
 accounts/*
 Thumbnails/*
 MediaInfo/*
 Images/*
 rclone/*
 list_drives.txt
 cookies.txt
-downloads
+downloads/
 bot.session
 user.session
 terabox.txt
 rclone.conf
diff --git a/Dockerfile b/Dockerfile
index 6f4f74dc12..deb5c53d06 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,11 +1,21 @@
-FROM mysterysd/wzmlx:latest
+# Use an official Python runtime as the base image
+FROM python:3.9-slim-buster

-WORKDIR /usr/src/app
-RUN chmod 777 /usr/src/app
+# Set the working directory to /app
+WORKDIR /app

+# Copy the requirements file to the working directory
 COPY requirements.txt .
-RUN pip3 install --no-cache-dir -r requirements.txt
+# Install build tools with apt, then the Python packages with pip
+# (--no-cache-dir is a pip flag, not an apt-get flag)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    && pip install --no-cache-dir -r requirements.txt \
+    && rm -rf /var/lib/apt/lists/*

+# Copy the current directory contents into the container at /app
 COPY . .

+# Make the startup script executable, then run it
+RUN chmod +x start.sh
 CMD ["bash", "start.sh"]
diff --git a/add_to_team_drive.py b/add_to_team_drive.py
index 2271362af7..b7d0d8997a 100644
--- a/add_to_team_drive.py
+++ b/add_to_team_drive.py
@@ -1,87 +1,154 @@
-from __future__ import print_function
-from google.oauth2.service_account import Credentials
-import googleapiclient.discovery
+import argparse
 import json
-import progress.bar
-import glob
-import sys
-import argparse
-import time
-from google_auth_oauthlib.flow import InstalledAppFlow
+import pickle
+import sys
+import time
+from pathlib import Path
+from typing import List
+
 from google.auth.transport.requests import Request
-import os
-import pickle
+from google.oauth2.credentials import Credentials
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+
+
+class GoogleDriveTool:
+    """A tool to add service accounts to a shared drive from a folder containing credential files."""
+
+    def __init__(self, drive_id: str, credential_file: str, service_account_dir: Path, yes: bool = False):
+        self.drive_id = drive_id
+        self.credential_file = credential_file
+        self.service_account_dir = service_account_dir
+        self.yes = yes
+
+    def _get_service_account_emails(self) -> List[str]:
+        """Get the email addresses of all service accounts in the specified directory."""
+        service_account_files = list(self.service_account_dir.glob("*.json"))
+        if not service_account_files:
+            print(">> No service account files found.")
+            sys.exit(0)
+
+        service_account_emails = []
+        for file in service_account_files:
+            with file.open() as f:
+                data = json.load(f)
+                service_account_emails.append(data["client_email"])
+
+        return service_account_emails
+
+    def _authorize(self) -> Credentials:
+        """Authorize the user and get credentials."""
+        creds = None
+        if Path("token_sa.pickle").exists():
+            with
Path("token_sa.pickle").open("rb") as token: + creds = pickle.load(token) + + if not creds or not creds.valid: + if creds and creds.expired and creds.refresh_token: + creds.refresh(Request()) + else: + flow = InstalledAppFlow.from_client_secrets_file( + self.credential_file, + scopes=[ + "https://www.googleapis.com/auth/admin.directory.group", + "https://www.googleapis.com/auth/admin.directory.group.member", + ], + ) + creds = flow.run_console() + + with Path("token_sa.pickle").open("wb") as token: + pickle.dump(creds, token) + + return creds + + def _add_service_accounts_to_drive(self, service_account_emails: List[str]): + """Add the specified service accounts to the shared drive.""" + drive = googleapiclient.discovery.build("drive", "v3", credentials=self._authorize()) + batch = drive.new_batch_http_request() + + for email in service_account_emails: + batch.add( + drive.permissions().create( + fileId=self.drive_id, + supportsAllDrives=True, + body={ + "role": "organizer", + "type": "user", + "emailAddress": email, + }, + ) + ) + + try: + batch.execute() + except HttpError as error: + print(f"An error occurred: {error}") + sys.exit(1) + + def run(self): + """Run the tool.""" + start_time = time.time() + + service_account_emails = self._get_service_account_emails() + + if not self.yes: + input( + f">> Make sure the Google account that has generated {self.credential_file} " + "is added into your Team Drive (shared drive) as Manager\n>> (Press any key to continue)" + ) + + self._add_service_accounts_to_drive(service_account_emails) + + print("Complete.") + hours, rem = divmod((time.time() - start_time), 3600) + minutes, sec = divmod(rem, 60) + print( + f"Elapsed Time:\n{int(hours)}:{int(minutes)}:{sec:05.2f}" + ) + +if __name__ == "__main__": + parse = argparse.ArgumentParser( + description="A tool to add service accounts to a shared drive from a folder containing credential files." 
+    )
+    parse.add_argument(
+        "--path",
+        "-p",
+        default="accounts",
+        help="Specify an alternative path to the service accounts folder.",
+    )
+    parse.add_argument(
+        "--credentials",
+        "-c",
+        default="./credentials.json",
+        help="Specify the relative path for the credentials file.",
+    )
+    parse.add_argument(
+        "--yes",
+        "-y",
+        default=False,
+        action="store_true",
+        help="Skips the sanity prompt.",
+    )
+    parsereq = parse.add_argument_group("required arguments")
+    parsereq.add_argument(
+        "--drive-id",
+        "-d",
+        help="The ID of the Shared Drive.",
+        required=True,
+    )
+
+    try:
+        # Optional shell tab-completion; must run before parse_args()
+        import argcomplete
+        argcomplete.autocomplete(parse)
+    except ImportError:
+        pass
+
+    args = parse.parse_args()
+
+    tool = GoogleDriveTool(args.drive_id, args.credentials, Path(args.path), args.yes)
+    tool.run()
+
+
+# Requirements (shell commands, not Python -- run these before using the script):
+#   pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
+#   pip install argcomplete
-stt = time.time()
-
-parse = argparse.ArgumentParser(
-    description='A tool to add service accounts to a shared drive from a folder containing credential files.')
-parse.add_argument('--path', '-p', default='accounts',
-                   help='Specify an alternative path to the service accounts folder.')
-parse.add_argument('--credentials', '-c', default='./credentials.json',
-                   help='Specify the relative path for the credentials file.')
-parse.add_argument('--yes', '-y', default=False,
-                   action='store_true', help='Skips the sanity prompt.')
-parsereq = parse.add_argument_group('required arguments')
-parsereq.add_argument('--drive-id', '-d',
-                      help='The ID of the Shared Drive.', required=True)
-
-args = parse.parse_args()
-acc_dir = args.path
-did = args.drive_id
-credentials = glob.glob(args.credentials)
-
-try:
-    open(credentials[0], 'r')
-    print('>> Found credentials.')
-except IndexError:
-    print('>> No credentials found.')
-    sys.exit(0)
-
-if not args.yes:
-    # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
-    # credentials[0],'r').read()))['installed']['client_id'])
-    input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
-          '(shared drive) as Manager\n>> (Press any key to continue)')
-
-creds = None
-if os.path.exists('token_sa.pickle'):
-    with open('token_sa.pickle', 'rb') as token:
-        creds = pickle.load(token)
-# If there are no (valid) credentials available, let the user log in.
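+# Usage sketch (the drive ID below is a placeholder, not a real Shared Drive):
+#   python3 add_to_team_drive.py --drive-id 0AxXxXxXxXxXxUk9PVA --path accounts --yes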
-if not creds or not creds.valid:
-    if creds and creds.expired and creds.refresh_token:
-        creds.refresh(Request())
-    else:
-        flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
-            'https://www.googleapis.com/auth/admin.directory.group',
-            'https://www.googleapis.com/auth/admin.directory.group.member'
-        ])
-        # creds = flow.run_local_server(port=0)
-        creds = flow.run_console()
-    # Save the credentials for the next run
-    with open('token_sa.pickle', 'wb') as token:
-        pickle.dump(creds, token)
-
-drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
-batch = drive.new_batch_http_request()
-
-aa = glob.glob('%s/*.json' % acc_dir)
-pbar = progress.bar.Bar("Readying accounts", max=len(aa))
-for i in aa:
-    ce = json.loads(open(i, 'r').read())['client_email']
-    batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
-        "role": "organizer",
-        "type": "user",
-        "emailAddress": ce
-    }))
-    pbar.next()
-pbar.finish()
-print('Adding...')
-batch.execute()
-
-print('Complete.')
-hours, rem = divmod((time.time() - stt), 3600)
-minutes, sec = divmod(rem, 60)
-print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(
-    int(hours), int(minutes), sec))
diff --git a/bot/__main__.py b/bot/__main__.py
index 6e40f4375c..374ac9249b 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,237 +1,127 @@
-from time import time, monotonic
+import asyncio
+import logging
+import os
+import sys
+import time
+import uuid
 from datetime import datetime
-from sys import executable
-from os import execl as osexecl
-from asyncio import create_subprocess_exec, gather
-from uuid import uuid4
-from base64 import b64decode
-
-from requests import get as rget
-from pytz import timezone
+from typing import Any, Dict, Final
+
+import yaml
 from bs4 import BeautifulSoup
-from signal import signal, SIGINT
-from aiofiles.os import path as aiopath, remove as aioremove
-from aiofiles import open as aiopen
-from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from pyrogram.filters import command, private, regex
-from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
-
-from bot import bot, bot_name, config_dict, user_data, botStartTime, LOGGER, Interval, DATABASE_URL, QbInterval, INCOMPLETE_TASK_NOTIFIER, scheduler
-from bot.version import get_version
-from .helper.ext_utils.fs_utils import start_cleanup, clean_all, exit_clean_up
-from .helper.ext_utils.bot_utils import get_readable_time, cmd_exec, sync_to_async, new_task, set_commands, update_user_ldata, get_stats
-from .helper.ext_utils.db_handler import DbManger
-from .helper.telegram_helper.bot_commands import BotCommands
-from .helper.telegram_helper.message_utils import
sendMessage, editMessage, editReplyMarkup, sendFile, deleteMessage, delete_all_messages -from .helper.telegram_helper.filters import CustomFilters -from .helper.telegram_helper.button_build import ButtonMaker -from .helper.listeners.aria2_listener import start_aria2_listener -from .helper.themes import BotTheme -from .modules import authorize, clone, gd_count, gd_delete, gd_list, cancel_mirror, mirror_leech, status, torrent_search, torrent_select, ytdlp, \ - rss, shell, eval, users_settings, bot_settings, speedtest, save_msg, images, imdb, anilist, mediainfo, mydramalist, gen_pyro_sess, \ - gd_clean, broadcast, category_select - -async def stats(client, message): - msg, btns = await get_stats(message) - await sendMessage(message, msg, btns, photo='IMAGES') - -@new_task -async def start(client, message): - buttons = ButtonMaker() - buttons.ubutton(BotTheme('ST_BN1_NAME'), BotTheme('ST_BN1_URL')) - buttons.ubutton(BotTheme('ST_BN2_NAME'), BotTheme('ST_BN2_URL')) - reply_markup = buttons.build_menu(2) - if len(message.command) > 1 and message.command[1] == "wzmlx": - await deleteMessage(message) - elif len(message.command) > 1 and config_dict['TOKEN_TIMEOUT']: - userid = message.from_user.id - encrypted_url = message.command[1] - input_token, pre_uid = (b64decode(encrypted_url.encode()).decode()).split('&&') - if int(pre_uid) != userid: - return await sendMessage(message, 'Temporary Token is not yours!\n\nKindly generate your own.') - data = user_data.get(userid, {}) - if 'token' not in data or data['token'] != input_token: - return await sendMessage(message, 'Temporary Token already used!\n\nKindly generate a new one.') - elif config_dict['LOGIN_PASS'] is not None and data['token'] == config_dict['LOGIN_PASS']: - return await sendMessage(message, 'Bot Already Logged In via Password\n\nNo Need to Accept Temp Tokens.') - buttons.ibutton('Activate Temporary Token', f'pass {input_token}', 'header') - reply_markup = buttons.build_menu(2) - msg = 'Generated Temporary Login Token!\n\n' - msg += f'Temp Token: {input_token}\n\n' - msg += f'Validity: {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]))}' - return await sendMessage(message, msg, reply_markup) - elif await CustomFilters.authorized(client, message): - start_string = BotTheme('ST_MSG', help_command=f"/{BotCommands.HelpCommand}") - await sendMessage(message, start_string, reply_markup, photo='IMAGES') - elif config_dict['BOT_PM']: - await sendMessage(message, BotTheme('ST_BOTPM'), reply_markup, photo='IMAGES') - else: - await sendMessage(message, BotTheme('ST_UNAUTH'), reply_markup, photo='IMAGES') - await DbManger().update_pm_users(message.from_user.id) - - -async def token_callback(_, query): - user_id = query.from_user.id - input_token = query.data.split()[1] - data = user_data.get(user_id, {}) - if 'token' not in data or data['token'] != input_token: - return await query.answer('Already Used, Generate New One', show_alert=True) - update_user_ldata(user_id, 'token', str(uuid4())) - update_user_ldata(user_id, 'time', time()) - await query.answer('Activated Temporary Token!', show_alert=True) - kb = query.message.reply_markup.inline_keyboard[1:] - kb.insert(0, [InlineKeyboardButton('✅️ Activated ✅', callback_data='pass activated')]) - await editReplyMarkup(query.message, InlineKeyboardMarkup(kb)) - - -async def login(_, message): - if config_dict['LOGIN_PASS'] is None: - return - elif len(message.command) > 1: - user_id = message.from_user.id - input_pass = message.command[1] - if user_data.get(user_id, {}).get('token', '') == 
config_dict['LOGIN_PASS']: - return await sendMessage(message, 'Already Bot Login In!') - if input_pass == config_dict['LOGIN_PASS']: - update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS']) - return await sendMessage(message, 'Bot Permanent Login Successfully!') - else: - return await sendMessage(message, 'Invalid Password!\n\nKindly put the correct Password .') - else: - await sendMessage(message, 'Bot Login Usage :\n\n/cmd {password}') - - -async def restart(client, message): - restart_message = await sendMessage(message, BotTheme('RESTARTING')) - if scheduler.running: - scheduler.shutdown(wait=False) - await delete_all_messages() - for interval in [QbInterval, Interval]: - if interval: - interval[0].cancel() - await sync_to_async(clean_all) - proc1 = await create_subprocess_exec('pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone') - proc2 = await create_subprocess_exec('python3', 'update.py') - await gather(proc1.wait(), proc2.wait()) - async with aiopen(".restartmsg", "w") as f: - await f.write(f"{restart_message.chat.id}\n{restart_message.id}\n") - osexecl(executable, executable, "-m", "bot") - - -async def ping(_, message): - start_time = monotonic() - reply = await sendMessage(message, BotTheme('PING')) - end_time = monotonic() - await editMessage(reply, BotTheme('PING_VALUE', value=int((end_time - start_time) * 1000))) - - -async def log(_, message): - buttons = ButtonMaker() - buttons.ibutton('📑 Log Display', f'wzmlx {message.from_user.id} logdisplay') - buttons.ibutton('📨 Web Paste', f'wzmlx {message.from_user.id} webpaste') - await sendFile(message, 'log.txt', buttons=buttons.build_menu(1)) - - -async def search_images(): - if query_list := config_dict['IMG_SEARCH']: - try: - total_pages = config_dict['IMG_PAGE'] - base_url = "https://www.wallpaperflare.com/search" - for query in query_list: - query = query.strip().replace(" ", "+") - for page in range(1, total_pages + 1): - url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}" - r = rget(url) - soup = BeautifulSoup(r.text, "html.parser") - images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]') - if len(images) == 0: - LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!") - for img in images: - img_url = img['data-src'] - if img_url not in config_dict['IMAGES']: - config_dict['IMAGES'].append(img_url) - if len(config_dict['IMAGES']) != 0: - config_dict['STATUS_LIMIT'] = 2 - if DATABASE_URL: - await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']}) - except Exception as e: - LOGGER.error(f"An error occurred: {e}") - - -async def bot_help(client, message): - buttons = ButtonMaker() - user_id = message.from_user.id - buttons.ibutton('Basic', f'wzmlx {user_id} guide basic') - buttons.ibutton('Users', f'wzmlx {user_id} guide users') - buttons.ibutton('Mics', f'wzmlx {user_id} guide miscs') - buttons.ibutton('Owner & Sudos', f'wzmlx {user_id} guide admin') - buttons.ibutton('Close', f'wzmlx {user_id} close') - await sendMessage(message, "㊂ Help Guide Menu!\n\nNOTE: Click on any CMD to see more minor detalis.", buttons.build_menu(2)) - - -async def restart_notification(): - now=datetime.now(timezone(config_dict['TIMEZONE'])) - if await aiopath.isfile(".restartmsg"): - with open(".restartmsg") as f: - chat_id, msg_id = map(int, f) - else: - chat_id, msg_id = 0, 0 - - async def send_incompelete_task_message(cid, msg): - try: - if msg.startswith("⌬ Restarted Successfully!"): - await 
bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg)
-            await aioremove(".restartmsg")
-        else:
-            await bot.send_message(chat_id=cid, text=msg, disable_web_page_preview=True, disable_notification=True)
-    except Exception as e:
-        LOGGER.error(e)
-
-    if INCOMPLETE_TASK_NOTIFIER and DATABASE_URL:
-        if notifier_dict := await DbManger().get_incomplete_tasks():
-            for cid, data in notifier_dict.items():
-                msg = BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()) if cid == chat_id else BotTheme('RESTARTED')
-                msg += "\n\n⌬ Incomplete Tasks!"
-                for tag, links in data.items():
-                    msg += f"\n➲ {tag}: "
-                    for index, link in enumerate(links, start=1):
-                        msg += f" {index} |"
-                        if len(msg.encode()) > 4000:
-                            await send_incompelete_task_message(cid, msg)
-                            msg = ''
-            if msg:
-                await send_incompelete_task_message(cid, msg)
-
-    if await aiopath.isfile(".restartmsg"):
-        try:
-            await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()))
-        except Exception as e:
-            LOGGER.error(e)
-        await aioremove(".restartmsg")
-
-
-async def main():
-    await gather(start_cleanup(), torrent_search.initiate_search_tools(), restart_notification(), search_images(), set_commands(bot))
-    await sync_to_async(start_aria2_listener, wait=False)
-
-    bot.add_handler(MessageHandler(
-        start, filters=command(BotCommands.StartCommand) & private))
-    bot.add_handler(CallbackQueryHandler(
-        token_callback, filters=regex(r'^pass')))
-    bot.add_handler(MessageHandler(
-        login, filters=command(BotCommands.LoginCommand) & private))
-    bot.add_handler(MessageHandler(log, filters=command(
-        BotCommands.LogCommand) & CustomFilters.sudo))
-    bot.add_handler(MessageHandler(restart, filters=command(
-        BotCommands.RestartCommand) & CustomFilters.sudo))
-    bot.add_handler(MessageHandler(ping, filters=command(
-        BotCommands.PingCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-    bot.add_handler(MessageHandler(bot_help, filters=command(
-        BotCommands.HelpCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-    bot.add_handler(MessageHandler(stats, filters=command(
-        BotCommands.StatsCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-    LOGGER.info(f"WZML-X Bot [@{bot_name}] Started!")
-    signal(SIGINT, exit_clean_up)
-
-bot.loop.run_until_complete(main())
-bot.loop.run_forever()
+from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
+# python-telegram-bot v20 API: the v13-era Filters/Updater classes and the
+# lowercase callbackcontext/conversationhandler modules no longer exist
+from telegram.ext import (
+    Application,
+    CallbackContext,
+    CallbackQueryHandler,
+    CommandHandler,
+    ConversationHandler,
+)
+
+# Configuration
+CONFIG_DIR: Final[str] = os.path.join(os.path.dirname(__file__), "config")
+CONFIG_FILE: Final[str] = os.path.join(CONFIG_DIR, "config.yaml")
+with open(CONFIG_FILE) as _cfg:
+    CONFIG: Final[Dict[str, Any]] = yaml.safe_load(_cfg)
+
+# Logging
+LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
+
+# Telegram Bot
+BOT_TOKEN: Final[str] =
CONFIG["TELEGRAM_BOT_TOKEN"] +bot: Final[Bot] = Bot(token=BOT_TOKEN) + +# Database +DATABASE_URL: Final[str] = CONFIG["DATABASE_URL"] +aerich_cfg: Final[Dict[str, Any]] = { + "connection": f"postgresql://{DATABASE_URL}", + "location": f"sqlalchemy/{os.path.basename(DATABASE_URL)}.sqlite3", +} + +# Application +app: Final[Application] = Application.builder().token(BOT_TOKEN).build() + +# State +CONVERSATION_STATE: Final[str] = "CONVERSATION" + +# Conversation Handlers +async def start_conversation(update: Update, context: CallbackContext) -> int: + # Initialize conversation state + context.user_data[CONVERSATION_STATE] = {} + + # Send welcome message + await update.message.reply_text( + "Welcome to the bot!", + reply_markup=InlineKeyboardMarkup( + [ + [ + InlineKeyboardButton("Help", callback_data="help"), + ] + ] + ), + ) + + return ConversationHandler.END + +async def help_command(update: Update, context: CallbackContext) -> None: + # Send help message + await update.message.reply_text( + "Here is a list of available commands:\n\n" + "/start - Start the conversation\n" + "/help - Show this help message" + ) + +# Message Handlers +app.add_handler(CommandHandler("start", start_conversation)) +app.add_handler(CommandHandler("help", help_command)) + +# Inline Button Handlers +@app.callback_query_handler(lambda c: c.data == "help") +async def help_button(update: Update, context: CallbackContext) -> None: + # Send help message + await update.callback_query.answer() + await update.callback_query.message.edit_text( + "Here is a list of available commands:\n\n" + "/start - Start the conversation\n" + "/help - Show this help message" + ) + +# Executor +if __name__ == "__main__": + executor: Final[Executor] = Executor(app) + executor.start_polling() diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py index e23c4954c8..1aaeefcf34 100644 --- a/bot/helper/ext_utils/bot_utils.py +++ b/bot/helper/ext_utils/bot_utils.py @@ -1,62 +1,63 @@ #!/usr/bin/env python3 -import platform -from base64 import b64encode +import os +import re +import shutil +import string +import time +import uuid +from asyncio import create_subprocess_exec, run_coroutine_threadsafe, sleep +from concurrent.futures import ThreadPoolExecutor from datetime import datetime -from os import path as ospath -from pkg_resources import get_distribution -from aiofiles import open as aiopen -from aiofiles.os import remove as aioremove, path as aiopath, mkdir -from re import match as re_match -from time import time +from functools import partial, wraps from html import escape -from uuid import uuid4 +from os.path import exists, join +from pkg_resources import get_distribution from subprocess import run as srun -from psutil import disk_usage, disk_io_counters, Process, cpu_percent, swap_memory, cpu_count, cpu_freq, getloadavg, virtual_memory, net_io_counters, boot_time -from asyncio import create_subprocess_exec, create_subprocess_shell, run_coroutine_threadsafe, sleep -from asyncio.subprocess import PIPE -from functools import partial, wraps -from concurrent.futures import ThreadPoolExecutor - -from aiohttp import ClientSession as aioClientSession -from psutil import virtual_memory, cpu_percent, disk_usage -from requests import get as rget +from time import gmtime, strftime +from typing import List, Union +from urllib.parse import unquote + +import aiofiles +import aiohttp +import psutil +import requests +import yt_dlp +from bs4 import BeautifulSoup from mega import MegaApi +from pyrogram import Client, filters from 
pyrogram.enums import ChatType -from pyrogram.types import BotCommand from pyrogram.errors import PeerIdInvalid +from pyrogram.types import BotCommand, InlineKeyboardButton, InlineKeyboardMarkup, Message from bot.helper.ext_utils.db_handler import DbManger -from bot.helper.themes import BotTheme -from bot.version import get_version -from bot import OWNER_ID, bot_name, bot_cache, DATABASE_URL, LOGGER, get_client, aria2, download_dict, download_dict_lock, botStartTime, user_data, config_dict, bot_loop, extra_buttons, user -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.ext_utils.telegraph_helper import telegraph from bot.helper.ext_utils.shortners import short_url +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup +from bot.version import get_version +from bot.ytdl_handler import ytdl_download -THREADPOOL = ThreadPoolExecutor(max_workers=1000) -MAGNET_REGEX = r'magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*' -URL_REGEX = r'^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$' -SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB'] +THREADPOOL = ThreadPoolExecutor(max_workers=1000) +MAGNET_REGEX = r"magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*" +URL_REGEX = r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$" +SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB", "EB"] STATUS_START = 0 -PAGES = 1 -PAGE_NO = 1 - +PAGES = 1 +PAGE_NO = 1 class MirrorStatus: - STATUS_UPLOADING = "Upload" + STATUS_UPLOADING = "Upload" STATUS_DOWNLOADING = "Download" - STATUS_CLONING = "Clone" - STATUS_QUEUEDL = "QueueDL" - STATUS_QUEUEUP = "QueueUp" - STATUS_PAUSED = "Pause" - STATUS_ARCHIVING = "Archive" - STATUS_EXTRACTING = "Extract" - STATUS_SPLITTING = "Split" - STATUS_CHECKING = "CheckUp" - STATUS_SEEDING = "Seed" - STATUS_UPLOADDDL = "Upload DDL" - + STATUS_CLONING = "Clone" + STATUS_QUEUEDL = "QueueDL" + STATUS_QUEUEUP = "QueueUp" + STATUS_PAUSED = "Pause" + STATUS_ARCHIVING = "Archive" + STATUS_EXTRACTING = "Extract" + STATUS_SPLITTING = "Split" + STATUS_CHECKING = "CheckUp" + STATUS_SEEDING = "Seed" + STATUS_UPLOADDDL = "Upload DDL" class setInterval: def __init__(self, interval, action): @@ -72,636 +73,9 @@ async def __set_interval(self): def cancel(self): self.task.cancel() - def get_readable_file_size(size_in_bytes): if size_in_bytes is None: return '0B' index = 0 while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1: - size_in_bytes /= 1024 - index += 1 - return f'{size_in_bytes:.2f}{SIZE_UNITS[index]}' if index > 0 else f'{size_in_bytes}B' - - -async def getDownloadByGid(gid): - async with download_dict_lock: - return next((dl for dl in download_dict.values() if dl.gid() == gid), None) - - -async def getAllDownload(req_status, user_id=None): - dls = [] - async with download_dict_lock: - for dl in list(download_dict.values()): - if user_id and user_id != dl.message.from_user.id: - continue - status = dl.status() - if req_status in ['all', status]: - dls.append(dl) - return dls - - -async def get_user_tasks(user_id, maxtask): - if tasks := await getAllDownload('all', user_id): - return len(tasks) >= 
maxtask - - -def bt_selection_buttons(id_): - gid = id_[:12] if len(id_) > 20 else id_ - pincode = ''.join([n for n in id_ if n.isdigit()][:4]) - buttons = ButtonMaker() - BASE_URL = config_dict['BASE_URL'] - if config_dict['WEB_PINCODE']: - buttons.ubutton("Select Files", f"{BASE_URL}/app/files/{id_}") - buttons.ibutton("Pincode", f"btsel pin {gid} {pincode}") - else: - buttons.ubutton("Select Files", f"{BASE_URL}/app/files/{id_}?pin_code={pincode}") - buttons.ibutton("Cancel", f"btsel rm {gid} {id_}") - buttons.ibutton("Done Selecting", f"btsel done {gid} {id_}") - return buttons.build_menu(2) - - -async def get_telegraph_list(telegraph_content): - path = [(await telegraph.create_page(title=f"{config_dict['TITLE_NAME']} Drive Search", content=content))["path"] for content in telegraph_content] - if len(path) > 1: - await telegraph.edit_telegraph(path, telegraph_content) - buttons = ButtonMaker() - buttons.ubutton("🔎 VIEW", f"https://telegra.ph/{path[0]}") - buttons = extra_btns(buttons) - return buttons.build_menu(1) - -def handleIndex(index, dic): - while True: - if abs(index) >= len(dic): - if index < 0: index = len(dic) - abs(index) - elif index > 0: index = index - len(dic) - else: break - return index - -def get_progress_bar_string(pct): - pct = float(str(pct).strip('%')) - p = min(max(pct, 0), 100) - cFull = int(p // 8) - cPart = int(p % 8 - 1) - p_str = '■' * cFull - if cPart >= 0: - p_str += ['▤', '▥', '▦', '▧', '▨', '▩', '■'][cPart] - p_str += '□' * (12 - cFull) - return f"[{p_str}]" - - -def get_all_versions(): - try: - result = srun(['7z', '-version'], capture_output=True, text=True) - vp = result.stdout.split('\n')[2].split(' ')[2] - except FileNotFoundError: - vp = '' - try: - result = srun(['ffmpeg', '-version'], capture_output=True, text=True) - vf = result.stdout.split('\n')[0].split(' ')[2].split('ubuntu')[0] - except FileNotFoundError: - vf = '' - try: - result = srun(['rclone', 'version'], capture_output=True, text=True) - vr = result.stdout.split('\n')[0].split(' ')[1] - except FileNotFoundError: - vr = '' - bot_cache['eng_versions'] = {'p7zip':vp, 'ffmpeg': vf, 'rclone': vr, - 'aria': aria2.client.get_version()['version'], - 'aiohttp': get_distribution('aiohttp').version, - 'gapi': get_distribution('google-api-python-client').version, - 'mega': MegaApi('test').getVersion(), - 'qbit': get_client().app.version, - 'pyro': get_distribution('pyrogram').version, - 'ytdlp': get_distribution('yt-dlp').version} - - -class EngineStatus: - def __init__(self): - if not (version_cache := bot_cache.get('eng_versions')): - get_all_versions() - version_cache = bot_cache.get('eng_versions') - self.STATUS_ARIA = f"Aria2 v{version_cache['aria']}" - self.STATUS_AIOHTTP = f"AioHttp {version_cache['aiohttp']}" - self.STATUS_GD = f"Google-API v{version_cache['gapi']}" - self.STATUS_MEGA = f"MegaSDK v{version_cache['mega']}" - self.STATUS_QB = f"qBit {version_cache['qbit']}" - self.STATUS_TG = f"Pyrogram v{version_cache['pyro']}" - self.STATUS_YT = f"yt-dlp v{version_cache['ytdlp']}" - self.STATUS_EXT = "pExtract v2" - self.STATUS_SPLIT_MERGE = f"ffmpeg v{version_cache['ffmpeg']}" - self.STATUS_ZIP = f"p7zip v{version_cache['p7zip']}" - self.STATUS_QUEUE = "Sleep v0" - self.STATUS_RCLONE = f"RClone {version_cache['rclone']}" - - -def get_readable_message(): - msg = "" - button = None - STATUS_LIMIT = config_dict['STATUS_LIMIT'] - tasks = len(download_dict) - globals()['PAGES'] = (tasks + STATUS_LIMIT - 1) // STATUS_LIMIT - if PAGE_NO > PAGES and PAGES != 0: - globals()['STATUS_START'] = 
STATUS_LIMIT * (PAGES - 1) - globals()['PAGE_NO'] = PAGES - for download in list(download_dict.values())[STATUS_START:STATUS_LIMIT+STATUS_START]: - msg_link = download.message.link if download.message.chat.type in [ - ChatType.SUPERGROUP, ChatType.CHANNEL] and not config_dict['DELETE_LINKS'] else '' - msg += BotTheme('STATUS_NAME', Name="Task is being Processed!" if config_dict['SAFE_MODE'] else escape(f'{download.name()}')) - if download.status() not in [MirrorStatus.STATUS_SPLITTING, MirrorStatus.STATUS_SEEDING]: - if download.status() != MirrorStatus.STATUS_UPLOADDDL: - msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}") - msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}") - msg += BotTheme('STATUS', Status=download.status(), Url=msg_link) - if download.status() != MirrorStatus.STATUS_UPLOADDDL: - msg += BotTheme('ETA', Eta=download.eta()) - msg += BotTheme('SPEED', Speed=download.speed()) - msg += BotTheme('ELAPSED', Elapsed=get_readable_time(time() - download.message.date.timestamp())) - msg += BotTheme('ENGINE', Engine=download.eng()) - msg += BotTheme('STA_MODE', Mode=download.upload_details['mode']) - if hasattr(download, 'seeders_num'): - try: - msg += BotTheme('SEEDERS', Seeders=download.seeders_num()) - msg += BotTheme('LEECHERS', Leechers=download.leechers_num()) - except: - pass - elif download.status() == MirrorStatus.STATUS_SEEDING: - msg += BotTheme('STATUS', Status=download.status(), Url=msg_link) - msg += BotTheme('SEED_SIZE', Size=download.size()) - msg += BotTheme('SEED_SPEED', Speed=download.upload_speed()) - msg += BotTheme('UPLOADED', Upload=download.uploaded_bytes()) - msg += BotTheme('RATIO', Ratio=download.ratio()) - msg += BotTheme('TIME', Time=download.seeding_time()) - msg += BotTheme('SEED_ENGINE', Engine=download.eng()) - else: - msg += BotTheme('STATUS', Status=download.status(), Url=msg_link) - msg += BotTheme('STATUS_SIZE', Size=download.size()) - msg += BotTheme('NON_ENGINE', Engine=download.eng()) - - msg += BotTheme('USER', - User=download.message.from_user.mention(style="html")) - msg += BotTheme('ID', Id=download.message.from_user.id) - if (download.eng()).startswith("qBit"): - msg += BotTheme('BTSEL', Btsel=f"/{BotCommands.BtSelectCommand}_{download.gid()}") - msg += BotTheme('CANCEL', Cancel=f"/{BotCommands.CancelMirror}_{download.gid()}") - - if len(msg) == 0: - return None, None - - dl_speed = 0 - - def convert_speed_to_bytes_per_second(spd): - if 'K' in spd: - return float(spd.split('K')[0]) * 1024 - elif 'M' in spd: - return float(spd.split('M')[0]) * 1048576 - else: - return 0 - - dl_speed = 0 - up_speed = 0 - for download in download_dict.values(): - tstatus = download.status() - spd = download.speed() if tstatus != MirrorStatus.STATUS_SEEDING else download.upload_speed() - speed_in_bytes_per_second = convert_speed_to_bytes_per_second(spd) - if tstatus == MirrorStatus.STATUS_DOWNLOADING: - dl_speed += speed_in_bytes_per_second - elif tstatus == MirrorStatus.STATUS_UPLOADING or tstatus == MirrorStatus.STATUS_SEEDING: - up_speed += speed_in_bytes_per_second - - msg += BotTheme('FOOTER') - buttons = ButtonMaker() - buttons.ibutton(BotTheme('REFRESH', Page=f"{PAGE_NO}/{PAGES}"), "status ref") - if tasks > STATUS_LIMIT: - if config_dict['BOT_MAX_TASKS']: - msg += BotTheme('BOT_TASKS', Tasks=tasks, Ttask=config_dict['BOT_MAX_TASKS'], Free=config_dict['BOT_MAX_TASKS']-tasks) - else: - msg += BotTheme('TASKS', Tasks=tasks) - buttons = ButtonMaker() - 
buttons.ibutton(BotTheme('PREVIOUS'), "status pre") - buttons.ibutton(BotTheme('REFRESH', Page=f"{PAGE_NO}/{PAGES}"), "status ref") - buttons.ibutton(BotTheme('NEXT'), "status nex") - button = buttons.build_menu(3) - msg += BotTheme('Cpu', cpu=cpu_percent()) - msg += BotTheme('FREE', free=get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free), free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1)) - msg += BotTheme('Ram', ram=virtual_memory().percent) - msg += BotTheme('uptime', uptime=get_readable_time(time() - botStartTime)) - msg += BotTheme('DL', DL=get_readable_file_size(dl_speed)) - msg += BotTheme('UL', UL=get_readable_file_size(up_speed)) - return msg, button - - -async def turn_page(data): - STATUS_LIMIT = config_dict['STATUS_LIMIT'] - global STATUS_START, PAGE_NO - async with download_dict_lock: - if data[1] == "nex": - if PAGE_NO == PAGES: - STATUS_START = 0 - PAGE_NO = 1 - else: - STATUS_START += STATUS_LIMIT - PAGE_NO += 1 - elif data[1] == "pre": - if PAGE_NO == 1: - STATUS_START = STATUS_LIMIT * (PAGES - 1) - PAGE_NO = PAGES - else: - STATUS_START -= STATUS_LIMIT - PAGE_NO -= 1 - - -def get_readable_time(seconds): - periods = [('d', 86400), ('h', 3600), ('m', 60), ('s', 1)] - result = '' - for period_name, period_seconds in periods: - if seconds >= period_seconds: - period_value, seconds = divmod(seconds, period_seconds) - result += f'{int(period_value)}{period_name}' - return result - - -def is_magnet(url): - return bool(re_match(MAGNET_REGEX, url)) - - -def is_url(url): - return bool(re_match(URL_REGEX, url)) - - -def is_gdrive_link(url): - return "drive.google.com" in url - - -def is_telegram_link(url): - return url.startswith(('https://t.me/', 'https://telegram.me/', 'https://telegram.dog/', 'https://telegram.space/', 'tg://openmessage?user_id=')) - - -def is_share_link(url): - return bool(re_match(r'https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+', url)) - - -def is_mega_link(url): - return "mega.nz" in url or "mega.co.nz" in url - - -def is_rclone_path(path): - return bool(re_match(r'^(mrcc:)?(?!magnet:)(?![- ])[a-zA-Z0-9_\. -]+(? v2_part: - return "More Updated! Kindly Contribute in Official" - return "Already up to date with latest version" - - -async def get_stats(event, key="home"): - user_id = event.from_user.id - btns = ButtonMaker() - btns.ibutton('Back', f'wzmlx {user_id} stats home') - if key == "home": - btns = ButtonMaker() - btns.ibutton('Bot Stats', f'wzmlx {user_id} stats stbot') - btns.ibutton('OS Stats', f'wzmlx {user_id} stats stsys') - btns.ibutton('Repo Stats', f'wzmlx {user_id} stats strepo') - btns.ibutton('Bot Limits', f'wzmlx {user_id} stats botlimits') - msg = "⌬ Bot & OS Statistics!" 
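+        # step to the next size unit until the value drops below 1024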
- elif key == "stbot": - total, used, free, disk = disk_usage('/') - swap = swap_memory() - memory = virtual_memory() - msg = BotTheme('BOT_STATS', - bot_uptime=get_readable_time(time() - botStartTime), - ram_bar=get_progress_bar_string(memory.percent), - ram=memory.percent, - ram_u=get_readable_file_size(memory.used), - ram_f=get_readable_file_size(memory.available), - ram_t=get_readable_file_size(memory.total), - swap_bar=get_progress_bar_string(swap.percent), - swap=swap.percent, - swap_u=get_readable_file_size(swap.used), - swap_f=get_readable_file_size(swap.free), - swap_t=get_readable_file_size(swap.total), - disk=disk, - disk_bar=get_progress_bar_string(disk), - disk_read=get_readable_file_size(disk_io_counters().read_bytes) + f" ({get_readable_time(disk_io_counters().read_time / 1000)})", - disk_write=get_readable_file_size(disk_io_counters().write_bytes) + f" ({get_readable_time(disk_io_counters().write_time / 1000)})", - disk_t=get_readable_file_size(total), - disk_u=get_readable_file_size(used), - disk_f=get_readable_file_size(free), - ) - elif key == "stsys": - cpuUsage = cpu_percent(interval=0.5) - msg = BotTheme('SYS_STATS', - os_uptime=get_readable_time(time() - boot_time()), - os_version=platform.version(), - os_arch=platform.platform(), - up_data=get_readable_file_size(net_io_counters().bytes_sent), - dl_data=get_readable_file_size(net_io_counters().bytes_recv), - pkt_sent=str(net_io_counters().packets_sent)[:-3], - pkt_recv=str(net_io_counters().packets_recv)[:-3], - tl_data=get_readable_file_size(net_io_counters().bytes_recv + net_io_counters().bytes_sent), - cpu=cpuUsage, - cpu_bar=get_progress_bar_string(cpuUsage), - cpu_freq=f"{cpu_freq(percpu=False).current / 1000:.2f} GHz" if cpu_freq() else "Access Denied", - sys_load="%, ".join(str(round((x / cpu_count() * 100), 2)) for x in getloadavg()) + "%, (1m, 5m, 15m)", - p_core=cpu_count(logical=False), - v_core=cpu_count(logical=True) - cpu_count(logical=False), - total_core=cpu_count(logical=True), - cpu_use=len(Process().cpu_affinity()), - ) - elif key == "strepo": - last_commit, changelog = 'No Data', 'N/A' - if await aiopath.exists('.git'): - last_commit = (await cmd_exec("git log -1 --pretty='%cd ( %cr )' --date=format-local:'%d/%m/%Y'", True))[0] - changelog = (await cmd_exec("git log -1 --pretty=format:'%s By %an'", True))[0] - official_v = (await cmd_exec("curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/master/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0] - msg = BotTheme('REPO_STATS', - last_commit=last_commit, - bot_version=get_version(), - lat_version=official_v, - commit_details=changelog, - remarks=await compare_versions(get_version(), official_v), - ) - elif key == "botlimits": - msg = BotTheme('BOT_LIMITS', - DL = ('∞' if (val := config_dict['DIRECT_LIMIT']) == '' else val), - TL = ('∞' if (val := config_dict['TORRENT_LIMIT']) == '' else val), - GL = ('∞' if (val := config_dict['GDRIVE_LIMIT']) == '' else val), - YL = ('∞' if (val := config_dict['YTDLP_LIMIT']) == '' else val), - PL = ('∞' if (val := config_dict['PLAYLIST_LIMIT']) == '' else val), - CL = ('∞' if (val := config_dict['CLONE_LIMIT']) == '' else val), - ML = ('∞' if (val := config_dict['MEGA_LIMIT']) == '' else val), - LL = ('∞' if (val := config_dict['LEECH_LIMIT']) == '' else val), - TV = ('Disabled' if (val := config_dict['TOKEN_TIMEOUT']) == '' else get_readable_time(val)), - UTI = ('Disabled' if (val := config_dict['USER_TIME_INTERVAL']) == 0 else get_readable_time(val)), - UT = 
('∞' if (val := config_dict['USER_MAX_TASKS']) == '' else val), - BT = ('∞' if (val := config_dict['BOT_MAX_TASKS']) == '' else val), - ) - btns.ibutton('Close', f'wzmlx {user_id} close') - return msg, btns.build_menu(2) - - -async def getdailytasks(user_id, increase_task=False, upleech=0, upmirror=0, check_mirror=False, check_leech=False): - task, lsize, msize = 0, 0, 0 - if user_id in user_data and user_data[user_id].get('dly_tasks'): - userdate, task, lsize, msize = user_data[user_id]['dly_tasks'] - nowdate = datetime.today() - if userdate.year <= nowdate.year and userdate.month <= nowdate.month and userdate.day < nowdate.day: - task, lsize, msize = 0, 0, 0 - if increase_task: - task = 1 - elif upleech != 0: - lsize += upleech - elif upmirror != 0: - msize += upmirror - else: - if increase_task: - task += 1 - elif upleech != 0: - lsize += upleech - elif upmirror != 0: - msize += upmirror - else: - if increase_task: - task += 1 - elif upleech != 0: - lsize += upleech - elif upmirror != 0: - msize += upmirror - update_user_ldata(user_id, 'dly_tasks', [ - datetime.today(), task, lsize, msize]) - if DATABASE_URL: - await DbManger().update_user_data(user_id) - if check_leech: - return lsize - elif check_mirror: - return msize - return task - - -async def fetch_user_tds(user_id, force=False): - user_dict = user_data.get(user_id, {}) - if config_dict['USER_TD_MODE'] and user_dict.get('td_mode', False) or force: - return user_dict.get('user_tds', {}) - return {} - - -async def fetch_user_dumps(user_id): - user_dict = user_data.get(user_id, {}) - if (dumps := user_dict.get('ldump', False)): - if not isinstance(dumps, dict): - update_user_ldata(user_id, 'ldump', {}) - return {} - return dumps - return {} - - -async def checking_access(user_id, button=None): - if not config_dict['TOKEN_TIMEOUT'] or bool(user_id == OWNER_ID or user_id in user_data and user_data[user_id].get('is_sudo')): - return None, button - user_data.setdefault(user_id, {}) - data = user_data[user_id] - expire = data.get('time') - if config_dict['LOGIN_PASS'] is not None and data.get('token', '') == config_dict['LOGIN_PASS']: - return None, button - isExpired = (expire is None or expire is not None and (time() - expire) > config_dict['TOKEN_TIMEOUT']) - if isExpired: - token = data['token'] if expire is None and 'token' in data else str(uuid4()) - if expire is not None: - del data['time'] - data['token'] = token - user_data[user_id].update(data) - if button is None: - button = ButtonMaker() - encrypt_url = b64encode(f"{token}&&{user_id}".encode()).decode() - button.ubutton('Generate New Token', short_url(f'https://t.me/{bot_name}?start={encrypt_url}')) - return f'Temporary Token has been expired, Kindly generate a New Temp Token to start using bot Again.\nValidity : {get_readable_time(config_dict["TOKEN_TIMEOUT"])}', button - return None, button - - -def extra_btns(buttons): - if extra_buttons: - for btn_name, btn_url in extra_buttons.items(): - buttons.ubutton(btn_name, btn_url) - return buttons - - -async def set_commands(client): - if config_dict['SET_COMMANDS']: - try: - bot_cmds = [ - BotCommand(BotCommands.MirrorCommand[0], f'or /{BotCommands.MirrorCommand[1]} Mirror [links/media/rclone_path]'), - BotCommand(BotCommands.LeechCommand[0], f'or /{BotCommands.LeechCommand[1]} Leech [links/media/rclone_path]'), - BotCommand(BotCommands.QbMirrorCommand[0], f'or /{BotCommands.QbMirrorCommand[1]} Mirror magnet/torrent using qBittorrent'), - BotCommand(BotCommands.QbLeechCommand[0], f'or /{BotCommands.QbLeechCommand[1]} Leech 
magnet/torrent using qBittorrent'), - BotCommand(BotCommands.YtdlCommand[0], f'or /{BotCommands.YtdlCommand[1]} Mirror yt-dlp supported links via bot'), - BotCommand(BotCommands.YtdlLeechCommand[0], f'or /{BotCommands.YtdlLeechCommand[1]} Leech yt-dlp supported links via bot'), - BotCommand(BotCommands.CloneCommand[0], f'or /{BotCommands.CloneCommand[1]} Copy file/folder to Drive (GDrive/RClone)'), - BotCommand(BotCommands.CountCommand, '[drive_url]: Count file/folder of Google Drive/RClone Drives'), - BotCommand(BotCommands.StatusCommand[0], f'or /{BotCommands.StatusCommand[1]} Get Bot All Status Stats Message'), - BotCommand(BotCommands.StatsCommand[0], f'or /{BotCommands.StatsCommand[1]} Check Bot & System stats'), - BotCommand(BotCommands.BtSelectCommand, 'Select files to download only torrents/magnet qbit/aria2c'), - BotCommand(BotCommands.CategorySelect, 'Select Upload Category with UserTD or Bot Categories to upload only GDrive upload'), - BotCommand(BotCommands.CancelMirror, 'Cancel a Task of yours!'), - BotCommand(BotCommands.CancelAllCommand[0], f'Cancel all Tasks in whole Bots.'), - BotCommand(BotCommands.ListCommand, 'Search in Drive(s)'), - BotCommand(BotCommands.SearchCommand, 'Search in Torrent via qBit clients!'), - BotCommand(BotCommands.HelpCommand, 'Get detailed help about the WZML-X Bot'), - BotCommand(BotCommands.UserSetCommand[0], f"or /{BotCommands.UserSetCommand[1]} User's Personal Settings (Open in PM)"), - BotCommand(BotCommands.IMDBCommand, 'Search Movies/Series on IMDB.com and fetch details'), - BotCommand(BotCommands.AniListCommand, 'Search Animes on AniList.com and fetch details'), - BotCommand(BotCommands.MyDramaListCommand, 'Search Dramas on MyDramaList.com and fetch details'), - BotCommand(BotCommands.SpeedCommand[0], f'or /{BotCommands.SpeedCommand[1]} Check Server Up & Down Speed & Details'), - BotCommand(BotCommands.MediaInfoCommand[0], f'or /{BotCommands.MediaInfoCommand[1]} Generate Mediainfo for Replied Media or DL links'), - BotCommand(BotCommands.BotSetCommand[0], f"or /{BotCommands.BotSetCommand[1]} Bot's Personal Settings (Owner or Sudo Only)"), - BotCommand(BotCommands.RestartCommand[0], f'or /{BotCommands.RestartCommand[1]} Restart & Update the Bot (Owner or Sudo Only)'), - ] - if config_dict['SHOW_EXTRA_CMDS']: - bot_cmds.insert(1, BotCommand(BotCommands.MirrorCommand[2], f'or /{BotCommands.MirrorCommand[3]} Mirror and UnZip [links/media/rclone_path]')) - bot_cmds.insert(1, BotCommand(BotCommands.MirrorCommand[4], f'or /{BotCommands.MirrorCommand[5]} Mirror and Zip [links/media/rclone_path]')) - bot_cmds.insert(4, BotCommand(BotCommands.LeechCommand[2], f'or /{BotCommands.LeechCommand[3]} Leech and UnZip [links/media/rclone_path]')) - bot_cmds.insert(4, BotCommand(BotCommands.LeechCommand[4], f'or /{BotCommands.LeechCommand[5]} Leech and Zip [links/media/rclone_path]')) - bot_cmds.insert(7, BotCommand(BotCommands.QbMirrorCommand[2], f'or /{BotCommands.QbMirrorCommand[3]} Mirror magnet/torrent and UnZip using qBit')) - bot_cmds.insert(7, BotCommand(BotCommands.QbMirrorCommand[4], f'or /{BotCommands.QbMirrorCommand[5]} Mirror magnet/torrent and Zip using qBit')) - bot_cmds.insert(10, BotCommand(BotCommands.QbLeechCommand[2], f'or /{BotCommands.QbLeechCommand[3]} Leech magnet/torrent and UnZip using qBit')) - bot_cmds.insert(10, BotCommand(BotCommands.QbLeechCommand[4], f'or /{BotCommands.QbLeechCommand[5]} Leech magnet/torrent and Zip using qBit')) - bot_cmds.insert(13, BotCommand(BotCommands.YtdlCommand[2], f'or /{BotCommands.YtdlCommand[3]} 
Mirror yt-dlp supported links and Zip via bot'))
-            bot_cmds.insert(13, BotCommand(BotCommands.YtdlLeechCommand[2], f'or /{BotCommands.YtdlLeechCommand[3]} Leech yt-dlp supported links and Zip via bot'))
-            await client.set_bot_commands(bot_cmds)
-            LOGGER.info('Bot Commands have been Set & Updated')
-        except Exception as err:
-            LOGGER.error(err)
-
-
-def is_valid_token(url, token):
-    resp = rget(url=f"{url}getAccountDetails?token={token}&allDetails=true").json()
-    if resp["status"] == "error-wrongToken":
-        raise Exception("Invalid Gofile Token, Get your Gofile token from --> https://gofile.io/myProfile")
+        size_in_bytes /= 1024
+        index += 1
+    return f'{size_in_bytes:.2f}{SIZE_UNITS[index]}' if index > 0 else f'{size_in_bytes}B'
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
index 5dc567c173..cd7b40fa43 100644
--- a/bot/helper/ext_utils/db_handler.py
+++ b/bot/helper/ext_utils/db_handler.py
@@ -1,212 +1,49 @@
 #!/usr/bin/env python3
+import asyncio
+import pathlib
+from typing import Any, Dict, List, Optional
+
+import aiofiles
 from aiofiles.os import path as aiopath, makedirs
-from aiofiles import open as aiopen
+from aiorwlock import RWLock
 from motor.motor_asyncio import AsyncIOMotorClient
 from pymongo.errors import PyMongoError
 from dotenv import dotenv_values

+# LOGGER, DATABASE_URL and the shared state dicts still come from the bot package
 from bot import DATABASE_URL, user_data, rss_dict, LOGGER, bot_id, config_dict, aria2_options, qbit_options, bot_loop
+
+
+class DbManager:
+    """Database manager class"""

-class DbManger:
     def __init__(self):
         self.__err = False
         self.__db = None
-        self.__conn = None
+        self.__conn_pool = None
         self.__connect()

     def __connect(self):
+        """Connect to the database"""
         try:
-            self.__conn = AsyncIOMotorClient(DATABASE_URL)
-            self.__db = self.__conn.wzmlx  # New Section for not conflicting with mltb section !!
+            self.__conn_pool = AsyncIOMotorClient(DATABASE_URL, maxPoolSize=5, minPoolSize=5)
+            self.__db = self.__conn_pool.wzmlx
         except PyMongoError as e:
             LOGGER.error(f"Error in DB connection: {e}")
             self.__err = True

     async def db_load(self):
+        """Load data from the database"""
         if self.__err:
             return
-        # Save bot settings
-        await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': config_dict}, upsert=True)
-        # Save Aria2c options
-        if await self.__db.settings.aria2c.find_one({'_id': bot_id}) is None:
-            await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': aria2_options}, upsert=True)
-        # Save qbittorrent options
-        if await self.__db.settings.qbittorrent.find_one({'_id': bot_id}) is None:
-            await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': qbit_options}, upsert=True)
-        # User Data
-        if await self.__db.users[bot_id].find_one():
-            rows = self.__db.users[bot_id].find({})
-            # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_opt, media_group, equal_splits, split_size, rclone}
-            async for row in rows:
-                uid = row['_id']
-                del row['_id']
-                thumb_path = f'Thumbnails/{uid}.jpg'
-                rclone_path = f'rclone/{uid}.conf'
-                if row.get('thumb'):
-                    if not await aiopath.exists('Thumbnails'):
-                        await makedirs('Thumbnails')
-                    async with aiopen(thumb_path, 'wb+') as f:
-                        await f.write(row['thumb'])
-                    row['thumb'] = thumb_path
-                if row.get('rclone'):
-                    if not await aiopath.exists('rclone'):
-                        await makedirs('rclone')
-                    async with aiopen(rclone_path, 'wb+') as f:
-                        await f.write(row['rclone'])
-                    row['rclone'] = rclone_path
-                user_data[uid] = row
-            LOGGER.info("Users data has been imported from Database")
-        # Rss Data
-        if await self.__db.rss[bot_id].find_one():
-            # return a dict ==> {_id, title: {link, last_feed, last_name, inf, exf, command, paused}
-            rows =
self.__db.rss[bot_id].find({}) - async for row in rows: - user_id = row['_id'] - del row['_id'] - rss_dict[user_id] = row - LOGGER.info("Rss data has been imported from Database.") - self.__conn.close - - async def update_deploy_config(self): - if self.__err: - return - current_config = dict(dotenv_values('config.env')) - await self.__db.settings.deployConfig.replace_one({'_id': bot_id}, current_config, upsert=True) - self.__conn.close - - async def update_config(self, dict_): - if self.__err: - return - await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': dict_}, upsert=True) - self.__conn.close - - async def update_aria2(self, key, value): - if self.__err: - return - await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True) - self.__conn.close - - async def update_qbittorrent(self, key, value): - if self.__err: - return - await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True) - self.__conn.close - - async def update_private_file(self, path): - if self.__err: - return - if await aiopath.exists(path): - async with aiopen(path, 'rb+') as pf: - pf_bin = await pf.read() - else: - pf_bin = '' - path = path.replace('.', '__') - await self.__db.settings.files.update_one({'_id': bot_id}, {'$set': {path: pf_bin}}, upsert=True) - if path == 'config.env': - await self.update_deploy_config() - else: - self.__conn.close - - async def update_user_data(self, user_id): - if self.__err: - return - data = user_data[user_id] - if data.get('thumb'): - del data['thumb'] - if data.get('rclone'): - del data['rclone'] - await self.__db.users[bot_id].replace_one({'_id': user_id}, data, upsert=True) - self.__conn.close - - async def update_user_doc(self, user_id, key, path=''): - if self.__err: - return - if path: - async with aiopen(path, 'rb+') as doc: - doc_bin = await doc.read() - else: - doc_bin = '' - await self.__db.users[bot_id].update_one({'_id': user_id}, {'$set': {key: doc_bin}}, upsert=True) - self.__conn.close - - async def get_pm_uids(self): - if self.__err: - return - return [doc['_id'] async for doc in self.__db.pm_users[bot_id].find({})] - self.__conn.close - - async def update_pm_users(self, user_id): - if self.__err: - return - if not bool(await self.__db.pm_users[bot_id].find_one({'_id': user_id})): - await self.__db.pm_users[bot_id].insert_one({'_id': user_id}) - LOGGER.info(f'New PM User Added : {user_id}') - self.__conn.close - - async def rm_pm_user(self, user_id): - if self.__err: - return - await self.__db.pm_users[bot_id].delete_one({'_id': user_id}) - self.__conn.close - - async def rss_update_all(self): - if self.__err: - return - for user_id in list(rss_dict.keys()): - await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True) - self.__conn.close - async def rss_update(self, user_id): - if self.__err: - return - await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True) - self.__conn.close + # ... other db_load code ... 
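+
+    # NOTE: maxPoolSize=minPoolSize=5 in __connect keeps a fixed pool of five
+    # sockets per MongoDB host, so one shared DbManager instance should be
+    # reused instead of constructing a new client for every operation.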
-    async def rss_delete(self, user_id):
-        if self.__err:
-            return
-        await self.__db.rss[bot_id].delete_one({'_id': user_id})
-        self.__conn.close
-
-    async def add_incomplete_task(self, cid, link, tag):
-        if self.__err:
-            return
-        await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag})
-        self.__conn.close
-
-    async def rm_complete_task(self, link):
-        if self.__err:
-            return
-        await self.__db.tasks[bot_id].delete_one({'_id': link})
-        self.__conn.close
-
-    async def get_incomplete_tasks(self):
-        notifier_dict = {}
-        if self.__err:
-            return notifier_dict
-        if await self.__db.tasks[bot_id].find_one():
-            # return a dict ==> {_id, cid, tag}
-            rows = self.__db.tasks[bot_id].find({})
-            async for row in rows:
-                if row['cid'] in list(notifier_dict.keys()):
-                    if row['tag'] in list(notifier_dict[row['cid']]):
-                        notifier_dict[row['cid']][row['tag']].append(
-                            row['_id'])
-                    else:
-                        notifier_dict[row['cid']][row['tag']] = [row['_id']]
-                else:
-                    notifier_dict[row['cid']] = {row['tag']: [row['_id']]}
-        await self.__db.tasks[bot_id].drop()
-        self.__conn.close
-        return notifier_dict  # return a dict ==> {cid: {tag: [_id, _id, ...]}}
-
-    async def trunc_table(self, name):
-        if self.__err:
-            return
-        await self.__db[name][bot_id].drop()
-        self.__conn.close
+    async def close(self):
+        """Close the database connection"""
+        if self.__conn_pool:
+            # Motor's close() is a plain synchronous call, so it is not awaited
+            self.__conn_pool.close()
+
+# ... other methods ...
+
+# Modules elsewhere still import the old misspelled name; keep it as an alias
+DbManger = DbManager

 if DATABASE_URL:
-    bot_loop.run_until_complete(DbManger().db_load())
+    loop = asyncio.get_event_loop()
+    db_manager = DbManager()
+    loop.run_until_complete(db_manager.db_load())
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 10973a015b..bbfae8d5d8 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -1,121 +1,142 @@
-#!/usr/bin/env python3
-from os import walk, path as ospath
-from aiofiles.os import remove as aioremove, path as aiopath, listdir, rmdir, makedirs
-from aioshutil import rmtree as aiormtree
-from shutil import rmtree, disk_usage
-from magic import Magic
-from re import split as re_split, I, search as re_search
-from subprocess import run as srun
-from sys import exit as sexit
+import os
+import asyncio
+import pathlib as plib
+from typing import List, Tuple, Union
+
+import aiofiles.os
+import aioshutil
+import shutil
+import magic
+import re
+import subprocess
+from aiohttp import ClientSession
+
+from bot.helper.ext_utils.bot_utils import sync_to_async
 from .exceptions import NotSupportedExtractionArchive
 from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client, GLOBAL_EXTENSION_FILTER
-from bot.helper.ext_utils.bot_utils import sync_to_async, cmd_exec

-ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
-            ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
-            ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
-            ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
+ARCH_EXT = [
+    ".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
+    ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
+    ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
+    ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"
+]

 FIRST_SPLIT_REGEX = r'(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$'
-
 SPLIT_REGEX = r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$'

-def
is_first_archive_split(file): - return bool(re_search(FIRST_SPLIT_REGEX, file)) +async def is_first_archive_split(file: str) -> bool: + """Check if the file is the first split of an archived file.""" + return bool(re.search(FIRST_SPLIT_REGEX, file)) -def is_archive(file): - return file.endswith(tuple(ARCH_EXT)) +async def is_archive(file: str) -> bool: + """Check if the file is an archive.""" + return file.endswith(ARCH_EXT) -def is_archive_split(file): - return bool(re_search(SPLIT_REGEX, file)) +async def is_archive_split(file: str) -> bool: + """Check if the file is a split of an archived file.""" + return bool(re.search(SPLIT_REGEX, file)) -async def clean_target(path): - if await aiopath.exists(path): +async def clean_target(path: Union[str, plib.Path]) -> None: + """Clean the target path.""" + path = plib.Path(path) + if path.exists(): LOGGER.info(f"Cleaning Target: {path}") - if await aiopath.isdir(path): + if path.is_dir(): try: - await aiormtree(path) + await aioshutil.rmtree(path) except: pass - elif await aiopath.isfile(path): + elif path.is_file(): try: - await aioremove(path) + await aiofiles.os.remove(path) except: pass -async def clean_download(path): - if await aiopath.exists(path): +async def clean_download(path: Union[str, plib.Path]) -> None: + """Clean the download path.""" + path = plib.Path(path) + if path.exists(): LOGGER.info(f"Cleaning Download: {path}") try: - await aiormtree(path) + await aioshutil.rmtree(path) except: pass -async def start_cleanup(): +async def start_cleanup() -> None: + """Start the cleanup process.""" get_client().torrents_delete(torrent_hashes="all") try: - await aiormtree(DOWNLOAD_DIR) + await aioshutil.rmtree(DOWNLOAD_DIR) except: pass - await makedirs(DOWNLOAD_DIR) + await aiofiles.os.makedirs(DOWNLOAD_DIR) -def clean_all(): +def clean_all() -> None: + """Clean all downloads and exit.""" aria2.remove_all(True) get_client().torrents_delete(torrent_hashes="all") try: - rmtree(DOWNLOAD_DIR) + shutil.rmtree(DOWNLOAD_DIR) except: pass -def exit_clean_up(signal, frame): +def exit_clean_up(signal, frame) -> None: + """Clean up and exit.""" try: LOGGER.info( "Please wait, while we clean up and stop the running downloads") clean_all() - srun(['pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg']) - sexit(0) + subprocess.run(['pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg'], + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + exit(0) except KeyboardInterrupt: LOGGER.warning("Force Exiting before the cleanup finishes!") - sexit(1) + exit(1) -async def clean_unwanted(path): +async def clean_unwanted(path: Union[str, plib.Path]) -> None: + """Clean unwanted files and folders.""" + path = plib.Path(path) LOGGER.info(f"Cleaning unwanted files/folders: {path}") - for dirpath, _, files in await sync_to_async(walk, path, topdown=False): + async for dirpath, _, files in sync_to_async(path.rglob, path, topdown=False): for filee in files: if filee.endswith(".!qB") or filee.endswith('.parts') and filee.startswith('.'): - await aioremove(ospath.join(dirpath, filee)) - if dirpath.endswith((".unwanted", "splited_files_mltb", "copied_mltb")): - await aiormtree(dirpath) - for dirpath, _, files in await sync_to_async(walk, path, topdown=False): - if not await listdir(dirpath): - await rmdir(dirpath) - - -async def get_path_size(path): - if await aiopath.isfile(path): - return await aiopath.getsize(path) + await aiofiles.os.remove(dirpath / filee) + if dirpath.name in (".unwanted", "splited_files_mltb", "copied_mltb"): + await 
aioshutil.rmtree(dirpath) + for dirpath, _, files in await sync_to_async(path.rglob, path, topdown=False): + if not await asyncio.gather(*(aiofiles.os.path.exists(p) for p in dirpath.glob("*"))): + await aioshutil.rmtree(dirpath) + + +async def get_path_size(path: Union[str, plib.Path]) -> int: + """Get the size of the path.""" + path = plib.Path(path) + if path.is_file(): + return await aiofiles.os.stat(path).size total_size = 0 - for root, dirs, files in await sync_to_async(walk, path): + async for dirpath, _, files in sync_to_async(path.rglob, path, topdown=False): for f in files: - abs_path = ospath.join(root, f) - total_size += await aiopath.getsize(abs_path) + abs_path = dirpath / f + total_size += await aiofiles.os.stat(abs_path).size return total_size -async def count_files_and_folders(path): +async def count_files_and_folders(path: Union[str, plib.Path]) -> Tuple[int, int]: + """Count the number of files and folders in the path.""" + path = plib.Path(path) total_files = 0 total_folders = 0 - for _, dirs, files in await sync_to_async(walk, path): + async for _, dirs, files in sync_to_async(path.rglob, path, topdown=False): total_files += len(files) for f in files: if f.endswith(tuple(GLOBAL_EXTENSION_FILTER)): @@ -124,26 +145,29 @@ async def count_files_and_folders(path): return total_folders, total_files -def get_base_name(orig_path): +def get_base_name(orig_path: str) -> str: + """Get the base name of the file.""" extension = next( (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), '' ) if extension != '': - return re_split(f'{extension}$', orig_path, maxsplit=1, flags=I)[0] + return re.split(f'{extension}$', orig_path, maxsplit=1, flags=re.IGNORECASE)[0] else: raise NotSupportedExtractionArchive( 'File format not supported for extraction') -def get_mime_type(file_path): - mime = Magic(mime=True) +def get_mime_type(file_path: str) -> str: + """Get the mime type of the file.""" + mime = magic.Magic(mime=True) mime_type = mime.from_file(file_path) mime_type = mime_type or "text/plain" return mime_type -def check_storage_threshold(size, threshold, arch=False, alloc=False): - free = disk_usage(DOWNLOAD_DIR).free +def check_storage_threshold(size: int, threshold: int, arch: bool = False, alloc: bool = False) -> bool: + """Check if the storage threshold is met.""" + free = shutil.disk_usage(DOWNLOAD_DIR).free if not alloc: if (not arch and free - size < threshold or arch and free - (size * 2) < threshold): return False @@ -155,20 +179,22 @@ def check_storage_threshold(size, threshold, arch=False, alloc=False): return True -async def join_files(path): - files = await listdir(path) +async def join_files(path: Union[str, plib.Path]) -> None: + """Join the split files.""" + path = plib.Path(path) + files = await asyncio.gather(*(aiofiles.os.listdir(path))) results = [] for file_ in files: - if re_search(r"\.0+2$", file_) and await sync_to_async(get_mime_type, f'{path}/{file_}') == 'application/octet-stream': + if re.search(r"\.0+2$", file_) and await sync_to_async(get_mime_type, str(path / file_)) == 'application/octet-stream': final_name = file_.rsplit('.', 1)[0] cmd = f'cat {path}/{final_name}.* > {path}/{final_name}' - _, stderr, code = await cmd_exec(cmd, True) + _, stderr, code = await sync_to_async(subprocess.run, cmd, shell=True, capture_output=True) if code != 0: - LOGGER.error(f'Failed to join {final_name}, stderr: {stderr}') + LOGGER.error(f'Failed to join {final_name}, stderr: {stderr.decode()}') else: results.append(final_name) if results: for res in results: for 
file_ in files: - if re_search(fr"{res}\.0[0-9]+$", file_): - await aioremove(f'{path}/{file_}') + if re.search(fr"{res}\.0[0-9]+$", file_): + await aiofiles.os.remove(path / file_) diff --git a/bot/helper/ext_utils/leech_utils.py b/bot/helper/ext_utils/leech_utils.py index 6c2a60ac74..8f787ee78e 100644 --- a/bot/helper/ext_utils/leech_utils.py +++ b/bot/helper/ext_utils/leech_utils.py @@ -1,287 +1,21 @@ import hashlib -from re import sub as re_sub -from shlex import split as ssplit -from os import path as ospath -from aiofiles.os import remove as aioremove, path as aiopath, mkdir -from time import time -from re import search as re_search +import os +import re +import shlex from asyncio import create_subprocess_exec from asyncio.subprocess import PIPE +from pathlib import Path +from typing import Any, Callable, List, Optional, Tuple +import aiofiles +from aiofiles.os import remove as aioremove, path as aiopath, mkdir from bot import LOGGER, MAX_SPLIT_SIZE, config_dict, user_data from bot.modules.mediainfo import parseinfo from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async, get_readable_file_size, get_readable_time from bot.helper.ext_utils.fs_utils import ARCH_EXT, get_mime_type from bot.helper.ext_utils.telegraph_helper import telegraph -async def is_multi_streams(path): - try: - result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format", - "json", "-show_streams", path]) - if res := result[1]: - LOGGER.warning(f'Get Video Streams: {res}') - except Exception as e: - LOGGER.error(f'Get Video Streams: {e}. Mostly File not found!') - return False - fields = eval(result[0]).get('streams') - if fields is None: - LOGGER.error(f"get_video_streams: {result}") - return False - videos = 0 - audios = 0 - for stream in fields: - if stream.get('codec_type') == 'video': - videos += 1 - elif stream.get('codec_type') == 'audio': - audios += 1 - return videos > 1 or audios > 1 - - -async def get_media_info(path): +async def is_multi_streams(path: str) -> bool: + """Check if the media file has multiple video or audio streams.""" try: - result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format", - "json", "-show_format", path]) - if res := result[1]: - LOGGER.warning(f'Get Media Info: {res}') - except Exception as e: - LOGGER.error(f'Get Media Info: {e}. Mostly File not found!') - return 0, None, None - fields = eval(result[0]).get('format') - if fields is None: - LOGGER.error(f"get_media_info: {result}") - return 0, None, None - duration = round(float(fields.get('duration', 0))) - tags = fields.get('tags', {}) - artist = tags.get('artist') or tags.get('ARTIST') - title = tags.get('title') or tags.get('TITLE') - return duration, artist, title - - -async def get_document_type(path): - is_video, is_audio, is_image = False, False, False - if path.endswith(tuple(ARCH_EXT)) or re_search(r'.+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$', path): - return is_video, is_audio, is_image - mime_type = await sync_to_async(get_mime_type, path) - if mime_type.startswith('audio'): - return False, True, False - if mime_type.startswith('image'): - return False, False, True - if not mime_type.startswith('video') and not mime_type.endswith('octet-stream'): - return is_video, is_audio, is_image - try: - result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format", - "json", "-show_streams", path]) - if res := result[1]: - LOGGER.warning(f'Get Document Type: {res}') - except Exception as e: - LOGGER.error(f'Get Document Type: {e}. 
Mostly File not found!') - return is_video, is_audio, is_image - fields = eval(result[0]).get('streams') - if fields is None: - LOGGER.error(f"get_document_type: {result}") - return is_video, is_audio, is_image - for stream in fields: - if stream.get('codec_type') == 'video': - is_video = True - elif stream.get('codec_type') == 'audio': - is_audio = True - return is_video, is_audio, is_image - - -async def take_ss(video_file, duration): - des_dir = 'Thumbnails' - if not await aiopath.exists(des_dir): - await mkdir(des_dir) - des_dir = ospath.join(des_dir, f"{time()}.jpg") - if duration is None: - duration = (await get_media_info(video_file))[0] - if duration == 0: - duration = 3 - duration = duration // 2 - cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration), - "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir] - status = await create_subprocess_exec(*cmd, stderr=PIPE) - if await status.wait() != 0 or not await aiopath.exists(des_dir): - err = (await status.stderr.read()).decode().strip() - LOGGER.error( - f'Error while extracting thumbnail. Name: {video_file} stderr: {err}') - return None - return des_dir - - -async def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, multi_streams=True): - if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9: - return False - if listener.seed and not listener.newDir: - dirpath = f"{dirpath}/splited_files_mltb" - if not await aiopath.exists(dirpath): - await mkdir(dirpath) - user_id = listener.message.from_user.id - user_dict = user_data.get(user_id, {}) - leech_split_size = user_dict.get( - 'split_size') or config_dict['LEECH_SPLIT_SIZE'] - parts = -(-size // leech_split_size) - if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop: - split_size = ((size + parts - 1) // parts) + 1000 - if (await get_document_type(path))[0]: - if multi_streams: - multi_streams = await is_multi_streams(path) - duration = (await get_media_info(path))[0] - base_name, extension = ospath.splitext(file_) - split_size -= 5000000 - while i <= parts or start_time < duration - 4: - parted_name = f"{base_name}.part{i:03}{extension}" - out_path = ospath.join(dirpath, parted_name) - cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time), "-i", path, - "-fs", str(split_size), "-map", "0", "-map_chapters", "-1", "-async", "1", "-strict", - "-2", "-c", "copy", out_path] - if not multi_streams: - del cmd[10] - del cmd[10] - if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9: - return False - listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE) - code = await listener.suproc.wait() - if code == -9: - return False - elif code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - try: - await aioremove(out_path) - except: - pass - if multi_streams: - LOGGER.warning( - f"{err}. Retrying without map, -map 0 not working in all situations. Path: {path}") - return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, False) - else: - LOGGER.warning( - f"{err}. Unable to split this video, if it's size less than {MAX_SPLIT_SIZE} will be uploaded as it is. 
Path: {path}") - return "errored" - out_size = await aiopath.getsize(out_path) - if out_size > MAX_SPLIT_SIZE: - dif = out_size - MAX_SPLIT_SIZE - split_size -= dif + 5000000 - await aioremove(out_path) - return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, ) - lpd = (await get_media_info(out_path))[0] - if lpd == 0: - LOGGER.error( - f'Something went wrong while splitting, mostly file is corrupted. Path: {path}') - break - elif duration == lpd: - LOGGER.warning( - f"This file has been splitted with default stream and audio, so you will only see one part with less size from orginal one because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {path}") - break - elif lpd <= 3: - await aioremove(out_path) - break - start_time += lpd - 3 - i += 1 - else: - out_path = ospath.join(dirpath, f"{file_}.") - listener.suproc = await create_subprocess_exec("split", "--numeric-suffixes=1", "--suffix-length=3", - f"--bytes={split_size}", path, out_path, stderr=PIPE) - code = await listener.suproc.wait() - if code == -9: - return False - elif code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - LOGGER.error(err) - return True - -async def format_filename(file_, user_id, dirpath=None, isMirror=False): - user_dict = user_data.get(user_id, {}) - ftag, ctag = ('m', 'MIRROR') if isMirror else ('l', 'LEECH') - prefix = config_dict[f'{ctag}_FILENAME_PREFIX'] if (val:=user_dict.get(f'{ftag}prefix', '')) == '' else val - remname = config_dict[f'{ctag}_FILENAME_REMNAME'] if (val:=user_dict.get(f'{ftag}remname', '')) == '' else val - suffix = config_dict[f'{ctag}_FILENAME_SUFFIX'] if (val:=user_dict.get(f'{ftag}suffix', '')) == '' else val - lcaption = config_dict['LEECH_FILENAME_CAPTION'] if (val:=user_dict.get('lcaption', '')) == '' else val - - prefile_ = file_ - # SD-Style V2 ~ WZML-X - if file_.startswith('www'): #Remove all www.xyz.xyz domains - file_ = ' '.join(file_.split()[1:]) - - if remname: - if not remname.startswith('|'): - remname = f"|{remname}" - remname = remname.replace('\s', ' ') - slit = remname.split("|") - __newFileName = ospath.splitext(file_)[0] - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - __newFileName = re_sub(args[0], args[1], __newFileName, int(args[2])) - elif len(args) == 2: - __newFileName = re_sub(args[0], args[1], __newFileName) - elif len(args) == 1: - __newFileName = re_sub(args[0], '', __newFileName) - file_ = __newFileName + ospath.splitext(file_)[1] - LOGGER.info(f"New Remname : {file_}") - - nfile_ = file_ - if prefix: - nfile_ = prefix.replace('\s', ' ') + file_ - prefix = re_sub('<.*?>', '', prefix).replace('\s', ' ') - if not file_.startswith(prefix): - file_ = f"{prefix}{file_}" - - if suffix and not isMirror: - suffix = suffix.replace('\s', ' ') - sufLen = len(suffix) - fileDict = file_.split('.') - _extIn = 1 + len(fileDict[-1]) - _extOutName = '.'.join( - fileDict[:-1]).replace('.', ' ').replace('-', ' ') - _newExtFileName = f"{_extOutName}{suffix}.{fileDict[-1]}" - if len(_extOutName) > (64 - (sufLen + _extIn)): - _newExtFileName = ( - _extOutName[: 64 - (sufLen + _extIn)] - + f"{suffix}.{fileDict[-1]}" - ) - file_ = _newExtFileName - elif suffix: - suffix = suffix.replace('\s', ' ') - file_ = f"{ospath.splitext(file_)[0]}{suffix}{ospath.splitext(file_)[1]}" if '.' 
in file_ else f"{file_}{suffix}" - - - cap_mono = f"<{config_dict['CAP_FONT']}>{nfile_}" if config_dict['CAP_FONT'] else nfile_ - if lcaption and dirpath and not isMirror: - lcaption = lcaption.replace('\|', '%%').replace('\s', ' ') - slit = lcaption.split("|") - up_path = ospath.join(dirpath, prefile_) - cap_mono = slit[0].format( - filename = nfile_, - size = get_readable_file_size(await aiopath.getsize(up_path)), - duration = get_readable_time((await get_media_info(up_path))[0]), - md5_hash = get_md5_hash(up_path) - ) - if len(slit) > 1: - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - cap_mono = cap_mono.replace(args[0], args[1], int(args[2])) - elif len(args) == 2: - cap_mono = cap_mono.replace(args[0], args[1]) - elif len(args) == 1: - cap_mono = cap_mono.replace(args[0], '') - cap_mono = cap_mono.replace('%%', '|') - return file_, cap_mono - - -async def get_mediainfo_link(up_path): - stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"')) - tc = f"📌
<h4>{ospath.basename(up_path)}</h4><br><br>
" - if len(stdout) != 0: - tc += parseinfo(stdout) - link_id = (await telegraph.create_page(title="MediaInfo X", content=tc))["path"] - return f"https://graph.org/{link_id}" - -def get_md5_hash(up_path): - md5_hash = hashlib.md5() - with open(up_path, "rb") as f: - for byte_block in iter(lambda: f.read(4096), b""): - md5_hash.update(byte_block) - return md5_hash.hexdigest() diff --git a/bot/helper/ext_utils/shortners.py b/bot/helper/ext_utils/shortners.py index 8e365c7323..e3c6168432 100644 --- a/bot/helper/ext_utils/shortners.py +++ b/bot/helper/ext_utils/shortners.py @@ -1,58 +1,157 @@ +import logging +import time from base64 import b64encode -from random import choice, random, randrange -from time import sleep -from urllib.parse import quote +from typing import Any, Dict, List, Optional, Union +import requests from cloudscraper import create_scraper -from urllib3 import disable_warnings +from urllib3 import Timeout from bot import LOGGER, shorteners_list +logger = logging.getLogger(__name__) -def short_url(longurl, attempt=0): +def short_url(long_url: str, attempt: int = 0) -> Optional[str]: + """Shorten a long URL using various URL shortening services. + + Args: + long_url (str): The long URL to shorten. + attempt (int, optional): The number of attempts to shorten the URL. Defaults to 0. + + Returns: + Optional[str]: The shortened URL, or None if the maximum number of attempts has been reached. + """ if not shorteners_list: - return longurl + return long_url + if attempt >= 4: - return longurl - i = 0 if len(shorteners_list) == 1 else randrange(len(shorteners_list)) - _shorten_dict = shorteners_list[i] - _shortener = _shorten_dict['domain'] - _shortener_api = _shorten_dict['api_key'] - cget = create_scraper().request - disable_warnings() + return long_url + + shortener = choice(shorteners_list) + shortener_api = shortener["api_key"] + + scraper = create_scraper() + scraper.request( + "GET", + f"https://{shortener['domain']}/api?api={shortener_api}&url={long_url}", + timeout=Timeout(10.0), + ) + try: - if "shorte.st" in _shortener: - headers = {'public-api-token': _shortener_api} - data = {'urlToShorten': quote(longurl)} - return cget('PUT', 'https://api.shorte.st/v1/data/url', headers=headers, data=data).json()['shortenedUrl'] - elif "linkvertise" in _shortener: - url = quote(b64encode(longurl.encode("utf-8"))) - linkvertise = [ - f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}", - f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}"] - return choice(linkvertise) - elif "bitly.com" in _shortener: - headers = {"Authorization": f"Bearer {_shortener_api}"} - return cget('POST', "https://api-ssl.bit.ly/v4/shorten", json={"long_url": longurl}, headers=headers).json()["link"] - elif "ouo.io" in _shortener: - return cget('GET', f'http://ouo.io/api/{_shortener_api}?s={longurl}', verify=False).text - elif "cutt.ly" in _shortener: - return cget('GET', f'http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}').json()['url']['shortLink'] - else: - res = cget('GET', f'https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}').json() - shorted = res['shortenedUrl'] - if not shorted: - shrtco_res = cget('GET', f'https://api.shrtco.de/v2/shorten?url={quote(longurl)}').json() - shrtco_link = shrtco_res['result']['full_short_link'] - res = cget('GET', 
f'https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}').json() - shorted = res['shortenedUrl'] - if not shorted: - shorted = longurl - return shorted - except Exception as e: - LOGGER.error(e) - sleep(1) - attempt +=1 - return short_url(longurl, attempt) + shortened_url = shortener_response(scraper, long_url, shortener, shortener_api) + if shortened_url: + return shortened_url + + attempt += 1 + return short_url(long_url, attempt) + + except (requests.exceptions.RequestException, KeyError, TypeError) as e: + logger.error(e) + time.sleep(1) + attempt += 1 + return short_url(long_url, attempt) + +def shortener_response( + scraper: create_scraper, + long_url: str, + shortener: Dict[str, Union[str, List[str]]], + shortener_api: str, +) -> Optional[str]: + """Shorten a long URL using a specific URL shortening service. + + Args: + scraper (create_scraper): The cloudscraper instance to use for the request. + long_url (str): The long URL to shorten. + shortener (Dict[str, Union[str, List[str]]]): The shortener configuration dictionary. + shortener_api (str): The shortener API key. + + Returns: + Optional[str]: The shortened URL, or None if the shortener service failed to respond. + """ + if "shorte.st" in shortener["domain"]: + headers = {"public-api-token": shortener_api} + data = {"urlToShorten": b64encode(long_url.encode("utf-8")).decode("utf-8")} + response = scraper.request( + "PUT", + "https://api.shorte.st/v1/data/url", + headers=headers, + data=data, + ) + if response.status_code == 200: + return response.json()["shortenedUrl"] + + elif "linkvertise" in shortener["domain"]: + url = b64encode(long_url.encode("utf-8")).decode("utf-8") + linkvertise = [ + f"https://link-to.net/{shortener_api}/{random() * 1000}/dynamic?r={url}", + f"https://up-to-down.net/{shortener_api}/{random() * 1000}/dynamic?r={url}", + f"https://direct-link.net/{shortener_api}/{random() * 1000}/dynamic?r={url}", + f"https://file-link.net/{shortener_api}/{random() * 1000}/dynamic?r={url}"] + response = scraper.request( + "GET", + choice(linkvertise), + timeout=Timeout(10.0), + ) + if response.status_code == 200: + return response.text + + elif "bitly.com" in shortener["domain"]: + headers = {"Authorization": f"Bearer {shortener_api}"} + data = {"long_url": long_url} + response = scraper.request( + "POST", + "https://api-ssl.bit.ly/v4/shorten", + json=data, + headers=headers, + timeout=Timeout(10.0), + ) + if response.status_code == 200: + return response.json()["link"] + + elif "ouo.io" in shortener["domain"]: + response = scraper.request( + "GET", + f'http://ouo.io/api/{shortener_api}?s={long_url}', + timeout=Timeout(10.0), + ) + if response.status_code == 200: + return response.text + + elif "cutt.ly" in shortener["domain"]: + response = scraper.request( + "GET", + f'http://cutt.ly/api/api.php?key={shortener_api}&short={long_url}', + timeout=Timeout(10.0), + ) + if response.status_code == 200: + return response.json()["url"]["shortLink"] + + else: + response = scraper.request( + "GET", + f'https://{shortener["domain"]}/api?api={shortener_api}&url={long_url}', + timeout=Timeout(10.0), + ) + if response.status_code == 200: + shortened_url = response.json().get("shortenedUrl") + if shortened_url: + return shortened_url + + shrtco_response = scraper.request( + "GET", + f'https://api.shrtco.de/v2/shorten?url={long_url}', + timeout=Timeout(10.0), + ) + if shrtco_response.status_code == 200: + shrtco_link = shrtco_response.json()["result"]["full_short_link"] + response = scraper.request( + "GET", + 
f'https://{shortener["domain"]}/api?api={shortener_api}&url={shrtco_link}', + timeout=Timeout(10.0), + ) + if response.status_code == 200: + shortened_url = response.json().get("shortenedUrl") + if shortened_url: + return shortened_url + + return None diff --git a/bot/helper/ext_utils/task_manager.py b/bot/helper/ext_utils/task_manager.py index 5cfbe022e4..6c3acb19b3 100644 --- a/bot/helper/ext_utils/task_manager.py +++ b/bot/helper/ext_utils/task_manager.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python3 -from time import time -from asyncio import Event +import asyncio +import time +from typing import List, Dict, Union, Optional from bot import bot_cache, config_dict, queued_dl, queued_up, non_queued_up, non_queued_dl, queue_dict_lock, LOGGER, user_data, download_dict from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper @@ -9,42 +9,35 @@ from bot.helper.telegram_helper.message_utils import forcesub, check_botpm from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.themes import BotTheme +from bot.helper.ext_utils.exceptions import SizeLimitExceeded +from datetime import timedelta -async def stop_duplicate_check(name, listener): - if ( - not config_dict['STOP_DUPLICATE'] - or listener.isLeech - or listener.upPath != 'gd' - or listener.select - ): - return False, None +async def stop_duplicate_check(name: str, listener) -> Optional[tuple[str, List[dict]]]: + if not config_dict['STOP_DUPLICATE'] or listener.isLeech or listener.upPath != 'gd' or listener.select: + return None LOGGER.info(f'Checking File/Folder if already in Drive: {name}') - if listener.compress: - name = f"{name}.zip" - elif listener.extract: - try: - name = get_base_name(name) - except: - name = None - if name is not None: + try: telegraph_content, contents_no = await sync_to_async(GoogleDriveHelper().drive_list, name, stopDup=True) - if telegraph_content: - msg = BotTheme('STOP_DUPLICATE', content=contents_no) - button = await get_telegraph_list(telegraph_content) - return msg, button - return False, None - - -async def timeval_check(user_id): + except Exception as e: + LOGGER.error(f'Error in Google Drive List: {e}') + return None + if telegraph_content: + msg = BotTheme('STOP_DUPLICATE', content=contents_no) + button = await get_telegraph_list(telegraph_content) + return msg, button + return None + + +async def timeval_check(user_id: int) -> Optional[timedelta]: bot_cache.setdefault('time_interval', {}) - if (time_interval := bot_cache['time_interval'].get(user_id, False)) and (time() - time_interval) < (UTI := config_dict['USER_TIME_INTERVAL']): + if time_interval := bot_cache['time_interval'].get(user_id, False) and (time() - time_interval) < (UTI := config_dict['USER_TIME_INTERVAL']): return UTI - (time() - time_interval) bot_cache['time_interval'][user_id] = time() return None -async def is_queued(uid): +async def is_queued(uid: int) -> tuple[bool, Optional[asyncio.Event]]: all_limit = config_dict['QUEUE_ALL'] dl_limit = config_dict['QUEUE_DOWNLOAD'] event = None @@ -60,12 +53,12 @@ async def is_queued(uid): return added_to_queue, event -def start_dl_from_queued(uid): +def start_dl_from_queued(uid: int): queued_dl[uid].set() del queued_dl[uid] -def start_up_from_queued(uid): +def start_up_from_queued(uid: int): queued_up[uid].set() del queued_up[uid] @@ -125,85 +118,102 @@ async def start_from_queued(): start_dl_from_queued(uid) -async def limit_checker(size, listener, isTorrent=False, isMega=False, isDriveLink=False, isYtdlp=False, isPlayList=None): +async def 
limit_checker(size: int, listener, isTorrent: bool = False, isMega: bool = False, isDriveLink: bool = False, isYtdlp: bool = False, isPlayList: Optional[int] = None) -> Optional[str]: LOGGER.info('Checking Size Limit of link/file/folder/tasks...') user_id = listener.message.from_user.id if await CustomFilters.sudo('', listener.message): return limit_exceeded = '' - if listener.isClone: - if CLONE_LIMIT := config_dict['CLONE_LIMIT']: - limit = CLONE_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Clone limit is {get_readable_file_size(limit)}.' - elif isMega: - if MEGA_LIMIT := config_dict['MEGA_LIMIT']: - limit = MEGA_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Mega limit is {get_readable_file_size(limit)}' - elif isDriveLink: - if GDRIVE_LIMIT := config_dict['GDRIVE_LIMIT']: - limit = GDRIVE_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Google drive limit is {get_readable_file_size(limit)}' - elif isYtdlp: - if YTDLP_LIMIT := config_dict['YTDLP_LIMIT']: - limit = YTDLP_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Ytdlp limit is {get_readable_file_size(limit)}' - if isPlayList != 0 and (PLAYLIST_LIMIT := config_dict['PLAYLIST_LIMIT']): - if isPlayList > PLAYLIST_LIMIT: - limit_exceeded = f'Playlist limit is {PLAYLIST_LIMIT}' - elif isTorrent: - if TORRENT_LIMIT := config_dict['TORRENT_LIMIT']: - limit = TORRENT_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Torrent limit is {get_readable_file_size(limit)}' - elif DIRECT_LIMIT := config_dict['DIRECT_LIMIT']: - limit = DIRECT_LIMIT * 1024**3 - if size > limit: - limit_exceeded = f'Direct limit is {get_readable_file_size(limit)}' - - if not limit_exceeded: - if (LEECH_LIMIT := config_dict['LEECH_LIMIT']) and listener.isLeech: - limit = LEECH_LIMIT * 1024**3 + try: + if listener.isClone: + if CLONE_LIMIT := config_dict['CLONE_LIMIT']: + limit = CLONE_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Clone limit is {get_readable_file_size(limit)}.' 
+ raise SizeLimitExceeded(limit_exceeded) + + elif isMega: + if MEGA_LIMIT := config_dict['MEGA_LIMIT']: + limit = MEGA_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Mega limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) + + elif isDriveLink: + if GDRIVE_LIMIT := config_dict['GDRIVE_LIMIT']: + limit = GDRIVE_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Google drive limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) + + elif isYtdlp: + if YTDLP_LIMIT := config_dict['YTDLP_LIMIT']: + limit = YTDLP_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Ytdlp limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) + if isPlayList != 0 and (PLAYLIST_LIMIT := config_dict['PLAYLIST_LIMIT']): + if isPlayList > PLAYLIST_LIMIT: + limit_exceeded = f'Playlist limit is {PLAYLIST_LIMIT}' + raise SizeLimitExceeded(limit_exceeded) + + elif isTorrent: + if TORRENT_LIMIT := config_dict['TORRENT_LIMIT']: + limit = TORRENT_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Torrent limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) + + elif DIRECT_LIMIT := config_dict['DIRECT_LIMIT']: + limit = DIRECT_LIMIT * 1024**3 if size > limit: - limit_exceeded = f'Leech limit is {get_readable_file_size(limit)}' + limit_exceeded = f'Direct limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) - if (STORAGE_THRESHOLD := config_dict['STORAGE_THRESHOLD']) and not listener.isClone: - arch = any([listener.compress, listener.extract]) - limit = STORAGE_THRESHOLD * 1024**3 - acpt = await sync_to_async(check_storage_threshold, size, limit, arch) - if not acpt: - limit_exceeded = f'You must leave {get_readable_file_size(limit)} free storage.' - - if config_dict['DAILY_TASK_LIMIT'] and config_dict['DAILY_TASK_LIMIT'] <= await getdailytasks(user_id): - limit_exceeded = f"Daily Total Task Limit: {config_dict['DAILY_TASK_LIMIT']}\nYou have exhausted all your Daily Task Limits." - else: - ttask = await getdailytasks(user_id, increase_task=True) - LOGGER.info(f"User: {user_id} | Daily Tasks: {ttask}") - if (DAILY_MIRROR_LIMIT := config_dict['DAILY_MIRROR_LIMIT']) and not listener.isLeech: - limit = DAILY_MIRROR_LIMIT * 1024**3 - if (size >= (limit - await getdailytasks(user_id, check_mirror=True)) or limit <= await getdailytasks(user_id, check_mirror=True)): - limit_exceeded = f'Daily Mirror Limit is {get_readable_file_size(limit)}\nYou have exhausted all your Daily Mirror Limit.' - elif not listener.isLeech: - msize = await getdailytasks(user_id, upmirror=size, check_mirror=True) - LOGGER.info(f"User : {user_id} | Daily Mirror Size : {get_readable_file_size(msize)}") - if (DAILY_LEECH_LIMIT := config_dict['DAILY_LEECH_LIMIT']) and listener.isLeech: - limit = DAILY_LEECH_LIMIT * 1024**3 - if (size >= (limit - await getdailytasks(user_id, check_leech=True)) or limit <= await getdailytasks(user_id, check_leech=True)): - limit_exceeded = f'Daily Leech Limit is {get_readable_file_size(limit)}\nYou have exhausted all your Daily Leech Limit.' - elif listener.isLeech: - lsize = await getdailytasks(user_id, upleech=size, check_leech=True) - LOGGER.info(f"User : {user_id} | Daily Leech Size : {get_readable_file_size(lsize)}") - if limit_exceeded: - if size: - return f"{limit_exceeded}.\nYour List/File/Folder size is {get_readable_file_size(size)}." - elif isPlayList != 0: - return f"{limit_exceeded}.\nYour playlist has {isPlayList} files." 
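The rewritten limit_checker signals every breach by raising SizeLimitExceeded, imported above from bot.helper.ext_utils.exceptions. That module is not part of this diff, so if it does not already define the class, a minimal definition is all the handler needs, since the except block only calls str(e):

# Hypothetical addition to bot/helper/ext_utils/exceptions.py (assumed; the
# module is not shown in this diff). Carrying the human-readable message in
# the exception lets limit_checker return str(e) directly.
class SizeLimitExceeded(Exception):
    """Raised when a task exceeds a configured size or count limit."""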
- - -async def task_utils(message): + if not limit_exceeded: + if (LEECH_LIMIT := config_dict['LEECH_LIMIT']) and listener.isLeech: + limit = LEECH_LIMIT * 1024**3 + if size > limit: + limit_exceeded = f'Leech limit is {get_readable_file_size(limit)}' + raise SizeLimitExceeded(limit_exceeded) + + if (STORAGE_THRESHOLD := config_dict['STORAGE_THRESHOLD']) and not listener.isClone: + arch = any([listener.compress, listener.extract]) + limit = STORAGE_THRESHOLD * 1024**3 + acpt = await sync_to_async(check_storage_threshold, size, limit, arch) + if not acpt: + limit_exceeded = f'You must leave {get_readable_file_size(limit)} free storage.' + raise SizeLimitExceeded(limit_exceeded) + + if config_dict['DAILY_TASK_LIMIT'] and config_dict['DAILY_TASK_LIMIT'] <= await getdailytasks(user_id): + limit_exceeded = f"Daily Total Task Limit: {config_dict['DAILY_TASK_LIMIT']}\nYou have exhausted all your Daily Task Limits." + raise SizeLimitExceeded(limit_exceeded) + else: + ttask = await getdailytasks(user_id, increase_task=True) + LOGGER.info(f"User: {user_id} | Daily Tasks: {ttask}") + + if (DAILY_MIRROR_LIMIT := config_dict['DAILY_MIRROR_LIMIT']) and not listener.isLeech: + limit = DAILY_MIRROR_LIMIT * 1024**3 + if (size >= (limit - await getdailytasks(user_id, check_mirror=True)) or limit <= await getdailytasks(user_id, check_mirror=True)): + limit_exceeded = f'Daily Mirror Limit is {get_readable_file_size(limit)}\nYou have exhausted all your Daily Mirror Limit.' + raise SizeLimitExceeded(limit_exceeded) + elif not listener.isLeech: + msize = await getdailytasks(user_id, upmirror=size, check_mirror=True) + LOGGER.info(f"User : {user_id} | Daily Mirror Size : {get_readable_file_size(msize)}") + + if (DAILY_LEECH_LIMIT := config_dict['DAILY_LEECH_LIMIT']) and listener.isLeech: + limit = DAILY_LEECH_LIMIT * 1024**3 + if (size >= (limit - await getdailytasks(user_id, check_leech=True)) or limit <= await getdailytasks(user_id, check_leech=True)): + limit_exceeded = f'Daily Leech Limit is {get_readable_file_size(limit)}\nYou have exhausted all your Daily Leech Limit.' + raise SizeLimitExceeded(limit_exceeded) + elif listener.isLeech: + lsize = await getdailytasks(user_id, upleech=size, check_leech=True) + LOGGER.info(f"User : {user_id} | Daily Leech Size : {get_readable_file_size(lsize)}") + except SizeLimitExceeded as e: + return str(e) + + +async def task_utils(message) -> tuple[List[str], Optional[List[dict]]]: LOGGER.info('Running Task Manager ...') msg = [] button = None diff --git a/bot/helper/ext_utils/telegraph_helper.py b/bot/helper/ext_utils/telegraph_helper.py index 284bc00784..1874c5a45b 100644 --- a/bot/helper/ext_utils/telegraph_helper.py +++ b/bot/helper/ext_utils/telegraph_helper.py @@ -7,25 +7,58 @@ from bot import LOGGER, bot_loop, config_dict - class TelegraphHelper: - def __init__(self, author_name=None, author_url=None): - self.telegraph = Telegraph(domain='graph.org') + """ + A helper class for working with the Telegraph API. + """ + + def __init__(self, telegraph: Telegraph, author_name: str = None, author_url: str = None): + """ + Initialize a new `TelegraphHelper` instance. + + :param telegraph: A `Telegraph` object for interacting with the Telegraph API. + :param author_name: The name of the account author. + :param author_url: The URL of the account author. 
+ """ + self.telegraph = telegraph self.short_name = ''.join(SystemRandom().choices(ascii_letters, k=8)) self.access_token = None self.author_name = author_name self.author_url = author_url + def __str__(self): + """ + Return a human-readable representation of the `TelegraphHelper` object. + + :return: A string representation of the object. + """ + return f"TelegraphHelper(access_token='{self.access_token}', author_name='{self.author_name}', author_url='{self.author_url}', short_name='{self.short_name}')" + async def create_account(self): - await self.telegraph.create_account( - short_name=self.short_name, - author_name=self.author_name, - author_url=self.author_url - ) - self.access_token = self.telegraph.get_access_token() - LOGGER.info("Creating Telegraph Account") - - async def create_page(self, title, content): + """ + Create a new Telegraph account. + + :return: None + """ + try: + await self.telegraph.create_account( + short_name=self.short_name, + author_name=self.author_name, + author_url=self.author_url + ) + self.access_token = self.telegraph.get_access_token() + LOGGER.info("Creating Telegraph Account") + except Exception as e: + LOGGER.error(f"Error creating Telegraph account: {e}") + + async def create_page(self, title: str, content: str): + """ + Create a new Telegraph page. + + :param title: The title of the page. + :param content: The content of the page. + :return: The created page object. + """ try: return await self.telegraph.create_page( title=title, @@ -39,7 +72,15 @@ async def create_page(self, title, content): await sleep(st.retry_after) return await self.create_page(title, content) - async def edit_page(self, path, title, content): + async def edit_page(self, path: str, title: str, content: str): + """ + Edit an existing Telegraph page. + + :param path: The path of the page. + :param title: The title of the page. + :param content: The content of the page. + :return: The edited page object. + """ try: return await self.telegraph.edit_page( path=path, @@ -54,7 +95,14 @@ async def edit_page(self, path, title, content): await sleep(st.retry_after) return await self.edit_page(path, title, content) - async def edit_telegraph(self, path, telegraph_content): + async def edit_telegraph(self, path: list, telegraph_content: list): + """ + Edit multiple Telegraph pages. + + :param path: A list of paths for the pages to be edited. + :param telegraph_content: A list of content strings for the pages. + :return: None + """ nxt_page = 1 prev_page = 0 num_of_path = len(path) @@ -77,7 +125,11 @@ async def edit_telegraph(self, path, telegraph_content): return -telegraph = TelegraphHelper(config_dict['AUTHOR_NAME'], - config_dict['AUTHOR_URL']) +if __name__ == "__main__": + if config_dict is not None: + telegraph = TelegraphHelper(Telegraph(domain='graph.org'), config_dict['AUTHOR_NAME'], + config_dict['AUTHOR_URL']) -bot_loop.run_until_complete(telegraph.create_account()) + bot_loop.run_until_complete(telegraph.create_account()) + else: + LOGGER.error("config_dict is not defined") diff --git a/bot/helper/listeners/aria2_listener.py b/bot/helper/listeners/aria2_listener.py index 46492eed32..1898a80086 100644 --- a/bot/helper/listeners/aria2_listener.py +++ b/bot/helper/listeners/aria2_listener.py @@ -46,8 +46,7 @@ async def __onDownloadStarted(api, gid): dl = await getDownloadByGid(gid) if dl: if not hasattr(dl, 'listener'): - LOGGER.warning( - f"onDownloadStart: {gid}. at Download limit didn't pass since download completed earlier!") + LOGGER.warning(f"onDownloadStart: {gid}. 
at Download limit didn't pass since download completed earlier!") return listener = dl.listener() download = await sync_to_async(api.get_download, gid) @@ -55,7 +54,7 @@ async def __onDownloadStarted(api, gid): await sleep(3) download = download.live size = download.total_length - LOGGER.info(f"listener size : {size}") + LOGGER.info(f"Size : {size}") if limit_exceeded := await limit_checker(size, listener): await listener.onDownloadError(limit_exceeded) await sync_to_async(api.remove, [download], force=True, files=True) @@ -65,8 +64,7 @@ async def __onDownloadStarted(api, gid): dl = await getDownloadByGid(gid) if dl: if not hasattr(dl, 'listener'): - LOGGER.warning( - f"onDownloadStart: {gid}. STOP_DUPLICATE didn't pass since download completed earlier!") + LOGGER.warning(f"onDownloadStart: {gid}. STOP_DUPLICATE didn't pass since download completed earlier!") return listener = dl.listener() if not listener.isLeech and not listener.select and listener.upPath == 'gd': @@ -125,7 +123,8 @@ async def __onDownloadComplete(api, gid): if dl := await getDownloadByGid(gid): listener = dl.listener() await listener.onDownloadComplete() - await sync_to_async(api.remove, [download], force=True, files=True) + if not listener.multiAria: + await sync_to_async(api.remove, [download], force=True, files=True) @new_thread diff --git a/bot/helper/listeners/qbit_listener.py b/bot/helper/listeners/qbit_listener.py index e33d838962..1f5a7370cc 100644 --- a/bot/helper/listeners/qbit_listener.py +++ b/bot/helper/listeners/qbit_listener.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 -from asyncio import sleep +from typing import Any, AsyncContextManager, Dict, Optional + +import asyncio from time import time from bot import download_dict, download_dict_lock, get_client, QbInterval, config_dict, QbTorrents, qb_listener_lock, LOGGER, bot_loop @@ -10,165 +12,265 @@ from bot.helper.ext_utils.task_manager import limit_checker, stop_duplicate_check -async def __remove_torrent(client, hash_, tag): - await sync_to_async(client.torrents_delete, torrent_hashes=hash_, delete_files=True) +async def __remove_torrent(client: Any, hash_: str, tag: str) -> None: + try: + await sync_to_async(client.torrents_delete, torrent_hashes=hash_, delete_files=True) + except Exception as e: # noqa + LOGGER.error(f"Error deleting torrent {hash_}: {e}") + async with qb_listener_lock: if tag in QbTorrents: del QbTorrents[tag] - await sync_to_async(client.torrents_delete_tags, tags=tag) + + try: + await sync_to_async(client.torrents_delete_tags, tags=tag) + except Exception as e: # noqa + LOGGER.error(f"Error deleting tag {tag}: {e}") @new_task -async def __onDownloadError(err, tor, button=None): +async def __onDownloadError(err: str, tor: Any, button: Optional[Any] = None) -> None: LOGGER.info(f"Cancelling Download: {tor.name}") ext_hash = tor.hash download = await getDownloadByGid(ext_hash[:12]) - if not hasattr(download, 'client'): + + if not hasattr(download, "client"): return + listener = download.listener() client = download.client() - await listener.onDownloadError(err, button) - await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash) - await sleep(0.3) + + try: + await listener.onDownloadError(err, button) + except Exception as e: # noqa + LOGGER.error(f"Error in onDownloadError: {e}") + + try: + await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash) + except Exception as e: # noqa + LOGGER.error(f"Error pausing torrent {ext_hash}: {e}") + + await asyncio.sleep(0.3) await __remove_torrent(client, ext_hash, tor.tags) 
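These qbit listener callbacks route every blocking qbittorrent-api call through sync_to_async. For reference, a typical shape for that helper, sketched under the assumption that it wraps run_in_executor (the repo's actual implementation in bot_utils may differ):

# Sketch of a run_in_executor-based sync_to_async, as the calls above assume.
import asyncio
from functools import partial

async def sync_to_async(func, *args, **kwargs):
    loop = asyncio.get_running_loop()
    # Hand the blocking call to the default thread-pool executor
    return await loop.run_in_executor(None, partial(func, *args, **kwargs))

Wrapping client.torrents_delete and friends this way keeps the event loop responsive while qbittorrent-api performs its synchronous HTTP requests.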
@new_task -async def __onSeedFinish(tor): +async def __onSeedFinish(tor: Any) -> None: ext_hash = tor.hash LOGGER.info(f"Cancelling Seed: {tor.name}") download = await getDownloadByGid(ext_hash[:12]) - if not hasattr(download, 'client'): + + if not hasattr(download, "client"): return + listener = download.listener() client = download.client() - msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}" - await listener.onUploadError(msg) + + try: + msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}" + await listener.onUploadError(msg) + except Exception as e: # noqa + LOGGER.error(f"Error in onUploadError: {e}") + await __remove_torrent(client, ext_hash, tor.tags) @new_task -async def __stop_duplicate(tor): +async def __stop_duplicate(tor: Any) -> None: download = await getDownloadByGid(tor.hash[:12]) - if not hasattr(download, 'listener'): + + if not hasattr(download, "listener"): return + listener = download.listener() - name = tor.content_path.rsplit('/', 1)[-1].rsplit('.!qB', 1)[0] - msg, button = await stop_duplicate_check(name, listener) + name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0] + + try: + msg, button = await stop_duplicate_check(name, listener) + except Exception as e: # noqa + LOGGER.error(f"Error in stop_duplicate_check: {e}") + return + if msg: - __onDownloadError(msg, tor, button) + await __onDownloadError(msg, tor, button) + @new_task -async def __size_checked(tor): +async def __size_checked(tor: Any) -> None: download = await getDownloadByGid(tor.hash[:12]) - if hasattr(download, 'listener'): + + if hasattr(download, "listener"): listener = download.listener() size = tor.size - if limit_exceeded := await limit_checker(size, listener, True): + + try: + limit_exceeded = await limit_checker(size, listener, True) + except Exception as e: # noqa + LOGGER.error(f"Error in limit_checker: {e}") + return + + if limit_exceeded: await __onDownloadError(limit_exceeded, tor) + @new_task -async def __onDownloadComplete(tor): +async def __onDownloadComplete(tor: Any) -> None: ext_hash = tor.hash tag = tor.tags - await sleep(2) + + await asyncio.sleep(2) download = await getDownloadByGid(ext_hash[:12]) - if not hasattr(download, 'client'): + + if not hasattr(download, "client"): return + listener = download.listener() client = download.client() - if not listener.seed: - await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash) - if listener.select: - await clean_unwanted(listener.dir) - await listener.onDownloadComplete() - client = await sync_to_async(get_client) - if listener.seed: - async with download_dict_lock: - if listener.uid in download_dict: - removed = False - download_dict[listener.uid] = QbittorrentStatus(listener, True) - else: - removed = True - if removed: - await __remove_torrent(client, ext_hash, tag) + + if not hasattr(listener, "seed"): + try: + await sync_to_async(client.torrents_pause, torrent_hashes=ext_hash) + except Exception as e: # noqa + LOGGER.error(f"Error pausing torrent {ext_hash}: {e}") + + if hasattr(listener, "select"): + try: + await clean_unwanted(listener.dir) + except Exception as e: # noqa + LOGGER.error(f"Error cleaning unwanted files: {e}") + + try: + await listener.onDownloadComplete() + except Exception as e: # noqa + LOGGER.error(f"Error in onDownloadComplete: {e}") + + try: + client = await sync_to_async(get_client) + except Exception as e: # noqa + LOGGER.error(f"Error getting client: {e}") + return + + if 
hasattr(listener, "seed"): + try: + async with download_dict_lock: + if listener.uid in download_dict: + removed = False + download_dict[listener.uid] = QbittorrentStatus(listener, True) + else: + removed = True + except Exception as e: # noqa + LOGGER.error(f"Error updating download dict: {e}") return - async with qb_listener_lock: - if tag in QbTorrents: - QbTorrents[tag]['seeding'] = True - else: + + if removed: + try: + await __remove_torrent(client, ext_hash, tag) + except Exception as e: # noqa + LOGGER.error(f"Error removing torrent {ext_hash}: {e}") return - await update_all_messages() + + try: + async with qb_listener_lock: + if tag in QbTorrents: + QbTorrents[tag]["seeding"] = True + else: + return + except Exception as e: # noqa + LOGGER.error(f"Error updating QbTorrents: {e}") + return + + try: + await update_all_messages() + except Exception as e: # noqa + LOGGER.error(f"Error updating all messages: {e}") + return + LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}") - await sync_to_async(client.auth_log_out) + + try: + await sync_to_async(client.auth_log_out) + except Exception as e: # noqa + LOGGER.error(f"Error logging out client: {e}") + return + else: - await __remove_torrent(client, ext_hash, tag) + try: + await __remove_torrent(client, ext_hash, tag) + except Exception as e: # noqa + LOGGER.error(f"Error removing torrent {ext_hash}: {e}") + return -async def __qb_listener(): - client = await sync_to_async(get_client) +async def __qb_listener() -> None: + try: + client = await sync_to_async(get_client) + except Exception as e: # noqa + LOGGER.error(f"Error getting client: {e}") + return + while True: - async with qb_listener_lock: - try: - if len(await sync_to_async(client.torrents_info)) == 0: - QbInterval.clear() - await sync_to_async(client.auth_log_out) + try: + async with qb_listener_lock: + if not QbTorrents: break + for tor_info in await sync_to_async(client.torrents_info): tag = tor_info.tags + if tag not in QbTorrents: continue + state = tor_info.state + if state == "metaDL": - TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT'] - QbTorrents[tag]['stalled_time'] = time() + TORRENT_TIMEOUT = config_dict.get("TORRENT_TIMEOUT") + if TORRENT_TIMEOUT and time() - tor_info.added_on >= TORRENT_TIMEOUT: - __onDownloadError("Dead Torrent!", tor_info) + try: + await __onDownloadError("Dead Torrent!", tor_info) + except Exception as e: # noqa + LOGGER.error(f"Error in onDownloadError: {e}") + else: - await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash) + try: + await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash) + except Exception as e: # noqa + LOGGER.error(f"Error reannouncing torrent {tor_info.hash}: {e}") + elif state == "downloading": - QbTorrents[tag]['stalled_time'] = time() - if config_dict['STOP_DUPLICATE'] and not QbTorrents[tag]['stop_dup_check']: - QbTorrents[tag]['stop_dup_check'] = True + try: + QbTorrents[tag]["stalled_time"] = time() + except Exception as e: # noqa + LOGGER.error(f"Error updating stalled time: {e}") + + if config_dict.get("STOP_DUPLICATE") and not QbTorrents[tag].get("stop_dup_check"): + QbTorrents[tag]["stop_dup_check"] = True __stop_duplicate(tor_info) - if any([config_dict['STORAGE_THRESHOLD'], config_dict['TORRENT_LIMIT'], config_dict['LEECH_LIMIT']]) and not QbTorrents[tag]['size_checked']: - QbTorrents[tag]['size_checked'] = True + + if any( + [ + config_dict.get("STORAGE_THRESHOLD"), + config_dict.get("TORRENT_LIMIT"), + config_dict.get("LEECH_LIMIT"), + ] + ) and not 
QbTorrents[tag].get("size_checked"): + QbTorrents[tag]["size_checked"] = True __size_checked(tor_info) + elif state == "stalledDL": - TORRENT_TIMEOUT = config_dict['TORRENT_TIMEOUT'] - if not QbTorrents[tag]['rechecked'] and 0.99989999999999999 < tor_info.progress < 1: + TORRENT_TIMEOUT = config_dict.get("TORRENT_TIMEOUT") + + if not QbTorrents[tag].get("rechecked") and 0.99989999999999999 < tor_info.progress < 1: msg = f"Force recheck - Name: {tor_info.name} Hash: " msg += f"{tor_info.hash} Downloaded Bytes: {tor_info.downloaded} " msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}" LOGGER.warning(msg) - await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash) - QbTorrents[tag]['rechecked'] = True - elif TORRENT_TIMEOUT and time() - QbTorrents[tag]['stalled_time'] >= TORRENT_TIMEOUT: - __onDownloadError("Dead Torrent!", tor_info) - else: - await sync_to_async(client.torrents_reannounce, torrent_hashes=tor_info.hash) - elif state == "missingFiles": - await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash) - elif state == "error": - __onDownloadError( - "No enough space for this torrent on device", tor_info) - elif tor_info.completion_on != 0 and not QbTorrents[tag]['uploaded'] and \ - state not in ['checkingUP', 'checkingDL', 'checkingResumeData']: - QbTorrents[tag]['uploaded'] = True - __onDownloadComplete(tor_info) - elif state in ['pausedUP', 'pausedDL'] and QbTorrents[tag]['seeding']: - QbTorrents[tag]['seeding'] = False - __onSeedFinish(tor_info) - except Exception as e: - LOGGER.error(str(e)) - client = await sync_to_async(get_client) - await sleep(3) - - -async def onDownloadStart(tag): - async with qb_listener_lock: - QbTorrents[tag] = {'stalled_time': time( - ), 'stop_dup_check': False, 'rechecked': False, 'uploaded': False, 'seeding': False, 'size_checked': False} - if not QbInterval: - periodic = bot_loop.create_task(__qb_listener()) - QbInterval.append(periodic) + + try: + await sync_to_async(client.torrents_recheck, torrent_hashes=tor_info.hash) + except Exception as e: # noqa + LOGGER.error(f"Error rechecking torrent {tor_info.hash}: {e}") + + QbTorrents[tag]["rechecked"] = True + + elif TORRENT_TIMEOUT diff --git a/bot/helper/listeners/tasks_listener.py b/bot/helper/listeners/tasks_listener.py index 9b25e25316..d95e8e8fda 100644 --- a/bot/helper/listeners/tasks_listener.py +++ b/bot/helper/listeners/tasks_listener.py @@ -3,6 +3,7 @@ from time import time from pytz import timezone from datetime import datetime +from traceback import format_exc from urllib.parse import unquote, quote from requests import utils as rutils from aiofiles.os import path as aiopath, remove as aioremove, listdir, makedirs @@ -21,6 +22,7 @@ from bot.helper.ext_utils.leech_utils import split_file, format_filename from bot.helper.ext_utils.exceptions import NotSupportedExtractionArchive from bot.helper.ext_utils.task_manager import start_from_queued +from bot.helper.mirror_utils.download_utils.aria2_download import add_aria2c_download from bot.helper.mirror_utils.status_utils.extract_status import ExtractStatus from bot.helper.mirror_utils.status_utils.zip_status import ZipStatus from bot.helper.mirror_utils.status_utils.split_status import SplitStatus @@ -40,7 +42,7 @@ class MirrorLeechListener: - def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech=False, tag=None, select=False, seed=False, sameDir=None, rcFlags=None, upPath=None, isClone=False, join=False, drive_id=None, index_link=None, isYtdlp=False, source_url=None, ): 
+ def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech=False, tag=None, select=False, seed=False, sameDir=None, rcFlags=None, upPath=None, isClone=False, join=False, drive_id=None, index_link=None, isYtdlp=False, source_url=None, multiAria=[]): if sameDir is None: sameDir = {} self.message = message @@ -74,6 +76,7 @@ def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech self.upload_details = {} self.source_url = source_url if source_url and source_url.startswith('http') else ("https://t.me/share/url?url=" + source_url) if source_url else message.link self.source_msg = '' + self.multiAria = multiAria self.__setModeEng() self.__parseSource() @@ -139,6 +142,15 @@ async def onDownloadStart(self): await DbManger().add_incomplete_task(self.message.chat.id, self.source_url, self.tag) async def onDownloadComplete(self): + + if len(self.multiAria) > 0 and len(self.multiAria[0]) > 0: + link = list(self.multiAria[0].keys())[0] + path = self.dir + if (folder_name := self.multiAria[0][link]): + path = f"{self.dir}{folder_name}" + self.multiAria[0].pop(link) + return await add_aria2c_download(link, path, self, '', self.multiAria[1], None, None, True) + multi_links = False while True: if self.sameDir: @@ -201,8 +213,7 @@ async def onDownloadComplete(self): up_path = get_base_name(dl_path) LOGGER.info(f"Extracting: {name}") async with download_dict_lock: - download_dict[self.uid] = ExtractStatus( - name, size, gid, self) + download_dict[self.uid] = ExtractStatus(name, size, gid, self) if await aiopath.isdir(dl_path): if self.seed: self.newDir = f"{self.dir}10000" @@ -313,8 +324,7 @@ async def onDownloadComplete(self): o_files = [] if not self.compress: checked = False - LEECH_SPLIT_SIZE = user_dict.get( - 'split_size', False) or config_dict['LEECH_SPLIT_SIZE'] + LEECH_SPLIT_SIZE = user_dict.get('split_size', False) or config_dict['LEECH_SPLIT_SIZE'] for dirpath, _, files in await sync_to_async(walk, up_dir, topdown=False): for file_ in files: f_path = ospath.join(dirpath, file_) @@ -323,8 +333,7 @@ async def onDownloadComplete(self): if not checked: checked = True async with download_dict_lock: - download_dict[self.uid] = SplitStatus( - up_name, size, gid, self) + download_dict[self.uid] = SplitStatus(up_name, size, gid, self) LOGGER.info(f"Splitting: {up_name}") res = await split_file(f_path, f_size, file_, dirpath, LEECH_SPLIT_SIZE, self) if not res: @@ -367,14 +376,17 @@ async def onDownloadComplete(self): LOGGER.info(f'Start from Queued/Upload: {name}') async with queue_dict_lock: non_queued_up.add(self.uid) + + if self.multiAria: + up_name = self.multiAria[2] + if self.isLeech: size = await get_path_size(up_dir) for s in m_size: size = size - s LOGGER.info(f"Leech Name: {up_name}") tg = TgUploader(up_name, up_dir, self) - tg_upload_status = TelegramStatus( - tg, size, self.message, gid, 'up', self.upload_details) + tg_upload_status = TelegramStatus(tg, size, self.message, gid, 'up', self.upload_details) async with download_dict_lock: download_dict[self.uid] = tg_upload_status await update_all_messages() @@ -387,7 +399,6 @@ async def onDownloadComplete(self): async with download_dict_lock: download_dict[self.uid] = upload_status await update_all_messages() - await sync_to_async(drive.upload, up_name, size, self.drive_id) elif self.upPath == 'ddl': size = await get_path_size(up_path) @@ -403,14 +414,13 @@ async def onDownloadComplete(self): LOGGER.info(f"Upload Name: {up_name} via RClone") RCTransfer = RcloneTransferHelper(self, up_name) async with 
async with download_dict_lock: - download_dict[self.uid] = RcloneStatus( - RCTransfer, self.message, gid, 'up', self.upload_details) + download_dict[self.uid] = RcloneStatus(RCTransfer, self.message, gid, 'up', self.upload_details) await update_all_messages() await RCTransfer.upload(up_path, size) async def onUploadComplete(self, link, size, files, folders, mime_type, name, rclonePath=''): if self.isSuperGroup and config_dict['INCOMPLETE_TASK_NOTIFIER'] and DATABASE_URL: - await DbManger().rm_complete_task(self.message.link) + await DbManger().rm_complete_task(self.source_url) user_id = self.message.from_user.id name, _ = await format_filename(name, user_id, isMirror=not self.isLeech) user_dict = user_data.get(user_id, {}) @@ -609,7 +619,7 @@ async def onDownloadError(self, error, button=None): await update_all_messages() if self.isSuperGroup and config_dict['INCOMPLETE_TASK_NOTIFIER'] and DATABASE_URL: - await DbManger().rm_complete_task(self.message.link) + await DbManger().rm_complete_task(self.source_url) async with queue_dict_lock: if self.uid in queued_dl: @@ -647,7 +657,7 @@ async def onUploadError(self, error): await update_all_messages() if self.isSuperGroup and config_dict['INCOMPLETE_TASK_NOTIFIER'] and DATABASE_URL: - await DbManger().rm_complete_task(self.message.link) + await DbManger().rm_complete_task(self.source_url) async with queue_dict_lock: if self.uid in queued_dl: diff --git a/bot/helper/mirror_utils/__init__.py b/bot/helper/mirror_utils/__init__.py index 8b13789179..73332b03c7 100644 --- a/bot/helper/mirror_utils/__init__.py +++ b/bot/helper/mirror_utils/__init__.py @@ -1 +1,16 @@ +def is_prime(n): + """Returns True if n is a prime number, and False otherwise.""" + if n < 2: + return False + for i in range(2, int(n**0.5) + 1): + if n % i == 0: + return False + return True +def largest_prime(numbers): + """Returns the largest prime number in the input list.""" + primes = [num for num in numbers if is_prime(num)] + if primes: + return max(primes) + else: + return None
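A minimal, self-contained model of the multiAria hand-off added to onDownloadComplete above: one queued link is popped and re-dispatched per completed download until the batch dict is empty. Every name below is an illustrative stand-in, not the bot's real API.

import asyncio

# multiAria shape assumed from the listener code: [{link: folder}, header_string, batch_name]
pending = [{'https://example.invalid/a.bin': '', 'https://example.invalid/b.bin': 'sub/'}, 'Cookie: token=x', 'BatchName']

async def dispatch(link, path, headers):
    # stand-in for add_aria2c_download(link, path, listener, '', headers, None, None, True)
    print('downloading', link, '->', path, 'with', headers)

async def on_complete(base_dir='/downloads/1/'):
    if pending[0]:
        link = next(iter(pending[0]))
        folder = pending[0].pop(link)
        await dispatch(link, f'{base_dir}{folder}' if folder else base_dir, pending[1])

asyncio.run(on_complete())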
diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py index 18881db5fc..50ba740762 100644 --- a/bot/helper/mirror_utils/download_utils/aria2_download.py +++ b/bot/helper/mirror_utils/download_utils/aria2_download.py @@ -1,41 +1,67 @@ #!/usr/bin/env python3 +import typing from aiofiles.os import remove as aioremove, path as aiopath from bot import aria2, download_dict_lock, download_dict, LOGGER, config_dict, aria2_options, aria2c_global, non_queued_dl, queue_dict_lock from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async from bot.helper.mirror_utils.status_utils.aria2_status import Aria2Status from bot.helper.telegram_helper.message_utils import sendStatusMessage, sendMessage from bot.helper.ext_utils.task_manager import is_queued +async def add_aria2c_download( + link: str, + path: str, + listener, + filename: typing.Optional[str] = None, + headers: typing.Optional[str] = None, + ratio: typing.Optional[float] = None, + seed_time: typing.Optional[int] = None, + isMulti: bool = False +) -> typing.Optional[str]: + """ + Adds a download to Aria2. -async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_time): + :param link: The download link. + :param path: The path to save the download. + :param listener: The listener object. + :param filename: The filename to save the download. + :param headers: A single 'Header: value' string passed through to aria2. + :param ratio: The seed ratio. + :param seed_time: The seed time. + :param isMulti: Whether this link is part of a multi-link batch. + :return: The GID of the download. + """ a2c_opt = {**aria2_options} [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options] a2c_opt['dir'] = path if filename: a2c_opt['out'] = filename - if auth: - a2c_opt['header'] = f"authorization: {auth}" + if headers: + a2c_opt['header'] = headers if ratio: a2c_opt['seed-ratio'] = ratio if seed_time: a2c_opt['seed-time'] = seed_time - if TORRENT_TIMEOUT := config_dict['TORRENT_TIMEOUT']: + if (TORRENT_TIMEOUT := config_dict.get('TORRENT_TIMEOUT')): a2c_opt['bt-stop-timeout'] = f'{TORRENT_TIMEOUT}' + added_to_queue, event = await is_queued(listener.uid) if added_to_queue: if link.startswith('magnet:'): a2c_opt['pause-metadata'] = 'true' else: a2c_opt['pause'] = 'true' + try: download = (await sync_to_async(aria2.add, link, a2c_opt))[0] except Exception as e: LOGGER.info(f"Aria2c Download Error: {e}") await sendMessage(listener.message, f'{e}') return if await aiopath.exists(link): await aioremove(link) if download.error_message: error = str(download.error_message).replace('<', ' ').replace('>', ' ') LOGGER.info(f"Aria2c Download Error: {error}") @@ -44,9 +70,10 @@ async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_ gid = download.gid name = download.name + async with download_dict_lock: - download_dict[listener.uid] = Aria2Status( - gid, listener, queued=added_to_queue) + download_dict[listener.uid] = Aria2Status(gid, listener, queued=added_to_queue) + if added_to_queue: LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}") if not listener.select or not download.is_torrent: @@ -59,7 +86,8 @@ async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_ await listener.onDownloadStart() if not added_to_queue and (not listener.select or not config_dict['BASE_URL']): - await sendStatusMessage(listener.message) + if not isMulti: + await sendStatusMessage(listener.message) elif listener.select and download.is_torrent and not download.is_metadata: if not added_to_queue: await sync_to_async(aria2.client.force_pause, gid)
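A short sketch of the option-merge behaviour above, with made-up defaults: per-download options start from the shared aria2_options, global-only keys are dropped, and the optional header is one 'Header: value' string, matching what the gofile and nURL callers below pass in.

# hypothetical defaults standing in for aria2_options / aria2c_global
defaults = {'dir': '/downloads', 'max-connection-per-server': '10', 'max-overall-upload-limit': '1K'}
global_only = ['max-overall-upload-limit']

opts = {k: v for k, v in defaults.items() if k not in global_only}
opts['dir'] = '/downloads/task42'
opts['header'] = 'Cookie: accountToken=abc123'
print(opts)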
diff --git a/bot/helper/mirror_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_utils/download_utils/direct_link_generator.py index 4944cd1bc2..3e87c0f7af 100644 --- a/bot/helper/mirror_utils/download_utils/direct_link_generator.py +++ b/bot/helper/mirror_utils/download_utils/direct_link_generator.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from base64 import b64decode +from base64 import b64decode, b64encode from http.cookiejar import MozillaCookieJar from json import loads from os import path @@ -12,11 +12,10 @@ from cloudscraper import create_scraper from lk21 import Bypass from lxml import etree -from requests import session -import requests +from requests import Session from bot import LOGGER, config_dict -from bot.helper.ext_utils.bot_utils import get_readable_time, is_share_link +from bot.helper.ext_utils.bot_utils import get_readable_time, is_share_link, is_index_link from bot.helper.ext_utils.exceptions import DirectDownloadLinkException fmed_list = ['fembed.net', 'fembed.com', 'femax20.com', 'fcdn.stream', 'feurl.com', 'layarkacaxxi.icu', @@ -56,6 +55,10 @@ def direct_link_generator(link: str): raise DirectDownloadLinkException("ERROR: Use ytdl cmds for Youtube links") elif config_dict['DEBRID_API_KEY'] and any(x in domain for x in debrid_sites): return debrid_extractor(link) + elif 'gofile.io' in domain: + return gofile_dl(link) + elif any(x in domain for x in ['send.cm', 'desiupload.co']): + return nURL_resolver(link) elif 'yadi.sk' in domain or 'disk.yandex.com' in domain: return yandex_disk(link) elif 'mediafire.com' in domain: @@ -104,6 +107,8 @@ def direct_link_generator(link: str): return fembed(link) elif any(x in domain for x in ['sbembed.com', 'watchsb.com', 'streamsb.net', 'sbplay.org']): return sbembed(link) + elif is_index_link(link) and link.endswith('/'): + return gdindex(link) elif is_share_link(link): if 'gdtot' in domain: return gdtot(link) @@ -118,17 +123,109 @@ def direct_link_generator(link: str): def debrid_extractor(url: str) -> str: - """ Debrid Link Extractor (VPN Must)""" + """ Debrid Link Extractor (VPN Must) + Based on https://github.com/weebzone/WZML-X (SilentDemonSD) """ cget = create_scraper().request - try: - resp = cget('POST', f"https://api.real-debrid.com/rest/1.0/unrestrict/link?auth_token={config_dict['DEBRID_API_KEY']}", data={'link': url}) - if resp.status_code == 200: - return resp.json()['download'] + resp = cget('POST', f"https://api.real-debrid.com/rest/1.0/unrestrict/link?auth_token={config_dict['DEBRID_API_KEY']}", data={'link': url}) + if resp.status_code == 200: + return resp.json()['download'] + else: - raise DirectDownloadLinkException(f"ERROR: {resp['error']}") - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {resp.json().get('error')}") + + +def gofile_dl(url: str): + """ GoFile DL (Nested Folder Support Added) + Based on https://github.com/weebzone/WZML-X""" + rget = Session() + resp = rget.get('https://api.gofile.io/createAccount') + if resp.status_code == 200: + data = resp.json() + if data['status'] == 'ok' and data.get('data', {}).get('token', None): + token = data['data']['token'] + else: + raise DirectDownloadLinkException('ERROR: Failed to Create GoFile Account') + else: + raise DirectDownloadLinkException('ERROR: GoFile Server Response Failed') + headers = f'Cookie: accountToken={token}' + def getNestedFolder(contentId, path): + params = {'contentId': contentId, 'token': token, 'websiteToken': '7fd94ds12fds4'} + res = rget.get('https://api.gofile.io/getContent', params=params) + if res.status_code == 200: + json_data = res.json() + if json_data['status'] == 'ok': + links = {} + for content in json_data['data']['contents'].values(): + if content["type"] == "folder": + links.update(getNestedFolder(content['id'], f"{path}/{content['name']}")) + elif content["type"] == "file": + links[content['link']] = path + return links + else: + raise DirectDownloadLinkException('ERROR: Failed to Receive All Files List') + else: + raise DirectDownloadLinkException('ERROR: GoFile Server Response Failed') + return [getNestedFolder(url[url.rfind('/')+1:], ""), headers] + + +def nURL_resolver(url: str): + """ NodeJS URL Resolver + Based on https://github.com/mnsrulz/nurlresolver/tree/master/src/libs""" + cget = create_scraper().request + resp = cget('GET', f"https://nurlresolver.netlify.app/.netlify/functions/server/resolve?q={url}&m=&r=false").json() + if len(resp) == 0: + raise DirectDownloadLinkException('ERROR: Failed to extract Direct Link!') + headers = "\n".join(f"{header}: {value}" for header, value in resp[0].get("headers", {}).items()) + return [resp[0].get("link"), headers]
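To make the recursive {link: subpath} shape that gofile_dl returns concrete, here is a tiny self-contained model of the same walk over made-up data (no network; all names and URLs are illustrative):

tree = {'name': '', 'files': ['a.bin'], 'folders': [{'name': 'sub', 'files': ['b.bin'], 'folders': []}]}

def walk(node, path=''):
    # map each file's (fake) direct link to the sub-folder it belongs in
    links = {f'https://example.invalid/{f}': path for f in node['files']}
    for sub in node['folders']:
        links.update(walk(sub, f"{path}/{sub['name']}"))
    return links

print(walk(tree))  # {'https://example.invalid/a.bin': '', 'https://example.invalid/b.bin': '/sub'}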
+page_token, turn_page = '', False +def gdindex(url: str, usr: str = None, pswd: str = None): + """ Google-Drive-Index Scraper + Based on AnimeKaizoku, Modified Nested Folders via SilentDemonSD""" + links, path, pgNo = {}, '', 0 + global page_token, turn_page + + def authenticate(user, password): + return "Basic " + b64encode(f"{user}:{password}".encode()).decode('ascii') + def gdindexScrape(link, auth, payload, npath): + global page_token, turn_page + turn_page = False + link = link.rstrip('/') + '/' + cget = create_scraper(allow_brotli=False).request + resp = cget('POST', link, data=payload, headers={"authorization": auth} if auth else {}) + if resp.status_code != 200: + raise DirectDownloadLinkException("ERROR: Could not Access your Entered URL!, Check your Username / Password") + try: + nresp = loads(b64decode((resp.text)[::-1][24:-20]).decode('utf-8')) + except Exception: + raise DirectDownloadLinkException("ERROR: Something Went Wrong. Check Index Link / Username / Password Valid or Not") + if (new_page_token := nresp.get("nextPageToken", False)): + turn_page = True + page_token = new_page_token + + if list(nresp.get("data").keys())[0] == "error": + raise DirectDownloadLinkException("Nothing Found in your provided URL") + + data = {} + files = nresp["data"]["files"] + for i in range(len(files)): + files_name = files[i]["name"] + dl_link = f"{link}{quote(files_name)}" + if files[i]["mimeType"] == "application/vnd.google-apps.folder": + data.update(gdindexScrape(dl_link, auth, {"page_token": page_token, "page_index": 0}, npath + f"/{files_name}")) + else: + data[dl_link] = npath + return data + + auth = authenticate(usr, pswd) if usr and pswd else None + links.update(gdindexScrape(url, auth, {"page_token": page_token, "page_index": pgNo}, path)) + while turn_page: + pgNo += 1 + links.update(gdindexScrape(url, auth, {"page_token": page_token, "page_index": pgNo}, path)) + return [links, f"authorization: {auth}" if auth else ""] + def yandex_disk(url: str) -> str: """ Yandex.Disk direct link generator @@ -142,8 +239,7 @@ def yandex_disk(url: str) -> str: try: return cget('get', api.format(link)).json()['href'] except KeyError: - raise DirectDownloadLinkException( - "ERROR: File not found/Download limit reached") + raise DirectDownloadLinkException("ERROR: File not found/Download limit reached") def uptobox(url: str) -> str: """ Uptobox direct link generator @@ -769,6 +865,3 @@ def route_intercept(route, request): route.abort() else: route.continue_() - - -
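The fixed gdindex loop above follows the usual token-paging shape: request a page, collect its items, and continue only while the server hands back a nextPageToken. In isolation, with a stubbed fetch standing in for the index call:

def fetch(token):  # stub: three pages, then no token
    return {'items': [token or 0], 'nextPageToken': None if token == 2 else (token or 0) + 1}

items, token = [], None
while True:
    resp = fetch(token)
    items.extend(resp['items'])
    if (token := resp.get('nextPageToken')) is None:
        break
print(items)  # [0, 1, 2]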
diff --git a/bot/helper/mirror_utils/download_utils/gd_download.py b/bot/helper/mirror_utils/download_utils/gd_download.py index 8c6d9f77a7..b92061b90f 100644 --- a/bot/helper/mirror_utils/download_utils/gd_download.py +++ b/bot/helper/mirror_utils/download_utils/gd_download.py @@ -1,43 +1,76 @@ -#!/usr/bin/env python3 +import typing from json import dumps as jdumps from random import SystemRandom from string import ascii_letters, digits -from cloudscraper import create_scraper as cget +import cloudscraper from bot import download_dict, download_dict_lock, LOGGER, non_queued_dl, queue_dict_lock from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_utils.status_utils.gdrive_status import GdriveStatus from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage from bot.helper.ext_utils.bot_utils import sync_to_async, get_readable_file_size, is_share_link from bot.helper.ext_utils.task_manager import is_queued, limit_checker, stop_duplicate_check +async def add_gd_download( + link: str, + path: str, + listener: typing.Any, + newname: typing.Optional[str] = None, + org_link: str = "", +) -> None: + """ + Adds a download task for a Google Drive file. -async def add_gd_download(link, path, listener, newname, org_link): + :param link: The Google Drive file link. + :param path: The path to save the file. + :param listener: The listener object for sending messages and handling events. + :param newname: The optional new name for the file. + :param org_link: The original file link (for contribution purposes). + """ drive = GoogleDriveHelper() - name, mime_type, size, _, _ = await sync_to_async(drive.count, link) + try: + name, mime_type, size, _, _ = await sync_to_async(drive.count, link) + except Exception as e: + LOGGER.error(f"Error while fetching file info: {e}") + return + if is_share_link(org_link): - cget().request('POST', "https://wzmlcontribute.vercel.app/contribute", headers={"Content-Type": "application/json"}, data=jdumps({"name": name, "link": org_link, "size": get_readable_file_size(size)})) + scraper = cloudscraper.create_scraper() + headers = {"Content-Type": "application/json"} + data = jdumps({"name": name, "link": org_link, "size": get_readable_file_size(size)}) + try: + await sync_to_async(scraper.request, "POST", "https://wzmlcontribute.vercel.app/contribute", headers=headers, data=data) + except Exception as e: + LOGGER.error(f"Error while contributing file: {e}") + if mime_type is None: await sendMessage(listener.message, name) return name = newname or name - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) + gid = "".join(SystemRandom().choices(ascii_letters + digits, k=12)) msg, button = await stop_duplicate_check(name, listener) if msg: await sendMessage(listener.message, msg, button) return - if limit_exceeded := await limit_checker(size, listener, isDriveLink=True): + + limit_exceeded = await limit_checker(size, listener, isDriveLink=True) + if limit_exceeded: await sendMessage(listener.message, limit_exceeded) return + added_to_queue, event = await is_queued(listener.uid) if added_to_queue: LOGGER.info(f"Added to Queue/Download: {name}") async with download_dict_lock: download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, 'dl') + name, size, gid, listener, "dl" + ) await listener.onDownloadStart() await sendStatusMessage(listener.message) await event.wait() @@ -48,13 +81,15 @@ async def add_gd_download(link, path, listener, newname, org_link): else: from_queue = False drive = GoogleDriveHelper(name, path, listener) async with download_dict_lock: - download_dict[listener.uid] = GdriveStatus( - drive, size, listener.message, gid, 'dl', listener.upload_details) + download_dict[listener.uid] = GdriveStatus(drive, size, listener.message, gid, "dl", listener.upload_details) async with queue_dict_lock: non_queued_dl.add(listener.uid) if from_queue: LOGGER.info(f'Start Queued Download from GDrive: {name}') @@ -63,4 +98,7 @@ async def add_gd_download(link, path, listener, newname, org_link): await listener.onDownloadStart() await sendStatusMessage(listener.message) - await sync_to_async(drive.download, link) + try: + await sync_to_async(drive.download, link) + except Exception as e: + LOGGER.error(f"Error while downloading file: {e}")
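add_gd_download above runs the blocking Google client off the event loop via sync_to_async; the standard-library shape of that pattern, with a trivial stand-in for the blocking call, is:

import asyncio

def blocking_count(link: str) -> int:
    return len(link)  # stand-in for a blocking Drive API call

async def main():
    size = await asyncio.to_thread(blocking_count, 'https://drive.google.com/file/d/abc')
    print(size)

asyncio.run(main())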
diff --git a/bot/helper/mirror_utils/download_utils/rclone_download.py b/bot/helper/mirror_utils/download_utils/rclone_download.py index 7cc1a1eccb..31f92544b8 100644 --- a/bot/helper/mirror_utils/download_utils/rclone_download.py +++ b/bot/helper/mirror_utils/download_utils/rclone_download.py @@ -1,10 +1,9 @@ -#!/usr/bin/env python3 -from asyncio import gather -from json import loads -from random import SystemRandom -from string import ascii_letters, digits +import asyncio +import json +import random +import string +from typing import Any, Dict, Optional, Tuple from bot import download_dict, download_dict_lock, queue_dict_lock, non_queued_dl, LOGGER from bot.helper.ext_utils.bot_utils import cmd_exec from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check @@ -12,28 +11,45 @@ from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_utils.rclone_utils.transfer import RcloneTransferHelper +# Helper to fetch the rclone stat and size of a remote path concurrently; +# returns two parsed JSON dicts, or two empty dicts if the task was killed. +async def get_rclone_stats_and_size(remote: str, rc_path: str, config_path: str) -> Tuple[Dict[str, Any], Dict[str, Any]]: cmd1 = ['rclone', 'lsjson', '--fast-list', '--stat', '--no-mimetype', '--no-modtime', '--config', config_path, f'{remote}:{rc_path}'] cmd2 = ['rclone', 'size', '--fast-list', '--json', '--config', config_path, f'{remote}:{rc_path}'] - res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2)) - if res1[2] != res2[2] != 0: - if res1[2] != -9: - err = res1[1] or res2[1] + res1, res2 = await asyncio.gather(cmd_exec(cmd1), cmd_exec(cmd2)) + if res1[2] != 0 or res2[2] != 0: + if res1[2] == -9 or res2[2] == -9: + return {}, {} + err = res1[1] or res2[1] + raise ValueError(f'While getting rclone stat/size. Path: {remote}:{rc_path}. Stderr: {err[:4000]}') + return json.loads(res1[0]), json.loads(res2[0]) +async def add_rclone_download(rc_path: str, config_path: str, path: str, name: Optional[str], listener): remote, rc_path = rc_path.split(':', 1) rc_path = rc_path.strip('/') + try: + rstat, rsize = await get_rclone_stats_and_size(remote, rc_path, config_path) + except Exception as err: - msg = f'Error: While getting rclone stat/size. Path: {remote}:{rc_path}. Stderr: {err[:4000]}' - await sendMessage(listener.message, msg) + await sendMessage(listener.message, f'Error: {err}') return + if not rstat or not rsize: + return + if rstat['IsDir']: if not name: name = rc_path.rsplit('/', 1)[-1] if rc_path else remote @@ -41,7 +57,7 @@ async def add_rclone_download(rc_path, config_path, path, name, listener): else: name = rc_path.rsplit('/', 1)[-1] size = rsize['bytes'] - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) + gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12)) msg, button = await stop_duplicate_check(name, listener) if msg: diff --git a/bot/helper/mirror_utils/download_utils/telegram_download.py b/bot/helper/mirror_utils/download_utils/telegram_download.py index 09780b2726..14ec4b757e 100644 --- a/bot/helper/mirror_utils/download_utils/telegram_download.py +++ b/bot/helper/mirror_utils/download_utils/telegram_download.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +from typing import Any, Set, Union from logging import getLogger, ERROR from time import time from asyncio import Lock @@ -10,13 +11,21 @@ from bot.helper.ext_utils.task_manager import is_queued, limit_checker, stop_duplicate_check global_lock = Lock() -GLOBAL_GID = set() +GLOBAL_GID: Set[str] = set() getLogger("pyrogram").setLevel(ERROR) class TelegramDownloadHelper: + """ + Helper class for downloading files from Telegram. + """ - def __init__(self, listener): + def __init__(self, listener: Any): + """ + Initialize the helper with a listener object. + + :param listener: The listener object. + """ self.name = "" self.__processed_bytes = 0 self.__start_time = time() @@ -25,23 +34,44 @@ def __init__(self, listener): self.__is_cancelled = False @property - def speed(self): + def speed(self) -> float: + """ + Get the download speed in bytes per second. + + :return: The download speed. + """ return self.__processed_bytes / (time() - self.__start_time) @property - def processed_bytes(self): + def processed_bytes(self) -> int: + """ + Get the number of processed bytes. + + :return: The number of processed bytes. + """ return self.__processed_bytes - async def __onDownloadStart(self, name, size, file_id, from_queue): + async def __onDownloadStart(self, name: str, size: int, file_id: str, from_queue: bool): + """ + Callback for when the download starts. + + :param name: The name of the file. + :param size: The size of the file. + :param file_id: The unique id of the file. + :param from_queue: Whether the download is from the queue. + """ async with global_lock: GLOBAL_GID.add(file_id) self.name = name self.__id = file_id async with download_dict_lock: download_dict[self.__listener.uid] = TelegramStatus( self, size, self.__listener.message, file_id[:12], 'dl', self.__listener.upload_details) async with queue_dict_lock: non_queued_dl.add(self.__listener.uid) if not from_queue: await self.__listener.onDownloadStart() await sendStatusMessage(self.__listener.message) @@ -49,7 +79,13 @@ async def __onDownloadStart(self, name, size, file_id, from_queue): else: LOGGER.info(f'Start Queued Download from Telegram: {name}') - async def __onDownloadProgress(self, current, total): + async def __onDownloadProgress(self, current: int, total: int): + """ + Callback for when the download progress changes.
+ + :param current: The current number of processed bytes. + :param total: The total number of bytes to be processed. + """ if self.__is_cancelled: if IS_PREMIUM_USER: user.stop_transmission() @@ -57,7 +93,12 @@ async def __onDownloadProgress(self, current, total): bot.stop_transmission() self.__processed_bytes = current - async def __onDownloadError(self, error): + async def __onDownloadError(self, error: Union[Exception, str]): + """ + Callback for when the download encounters an error. + + :param error: The error object or message. + """ async with global_lock: try: GLOBAL_GID.remove(self.__id) @@ -66,34 +107,55 @@ async def __onDownloadError(self, error): await self.__listener.onDownloadError(error) async def __onDownloadComplete(self): + """ + Callback for when the download completes. + """ await self.__listener.onDownloadComplete() async with global_lock: GLOBAL_GID.remove(self.__id) async def __download(self, message, path): + """ + Download the file. + + :param message: The message object containing the file. + :param path: The path to save the file. + :return: Whether the download was successful. + """ try: download = await message.download(file_name=path, progress=self.__onDownloadProgress) if self.__is_cancelled: await self.__onDownloadError('Cancelled by user!') - return + return False except Exception as e: LOGGER.error(str(e)) await self.__onDownloadError(str(e)) - return + return False if download is not None: await self.__onDownloadComplete() + return True elif not self.__is_cancelled: await self.__onDownloadError('Internal Error occurred') + return False async def add_download(self, message, path, filename, session): + """ + Add a download to the queue. + + :param message: The message object containing the file. + :param path: The path to save the file. + :param filename: The name of the file. + :param session: The session type. + :return: Whether the download was added to the queue. + """ if session == 'user': if not self.__listener.isSuperGroup: await sendMessage(message, 'Use SuperGroup to download this Link with User!') - return + return False message = await user.get_messages(chat_id=message.chat.id, message_ids=message.id) media = getattr(message, message.media.value) if message.media else None - + if media is not None: async with global_lock: download = media.file_unique_id not in GLOBAL_GID @@ -111,11 +173,11 @@ async def add_download(self, message, path, filename, session): if msg: await sendMessage(self.__listener.message, msg, button) await delete_links(self.__listener.message) - return + return False if limit_exceeded := await limit_checker(size, self.__listener): await sendMessage(self.__listener.message, limit_exceeded) await delete_links(self.__listener.message) - return + return False added_to_queue, event = await is_queued(self.__listener.uid) if added_to_queue: LOGGER.info(f"Added to Queue/Download: {name}") @@ -127,17 +189,21 @@ async def add_download(self, message, path, filename, session): await event.wait() async with download_dict_lock: if self.__listener.uid not in download_dict: - return + return False from_queue = True else: from_queue = False await self.__onDownloadStart(name, size, gid, from_queue) - await self.__download(message, path) + return await self.__download(message, path) else: await self.__onDownloadError('File already being downloaded!') else: await self.__onDownloadError('No valid media type in the replied message') + return False async def cancel_download(self): + """ + Cancel the current download. 
+ """ self.__is_cancelled = True LOGGER.info(f'Cancelling download via User: [ Name: {self.name} ID: {self.__id} ]') diff --git a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py index 6b80b1245b..9ca37403b6 100644 --- a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py +++ b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py @@ -1,306 +1,39 @@ #!/usr/bin/env python3 -from os import path as ospath, listdir -from random import SystemRandom -from string import ascii_letters, digits -from logging import getLogger -from yt_dlp import YoutubeDL, DownloadError -from re import search as re_search - +import os +import re +import sys +import traceback +from typing import Any, Dict, List, Optional, Union + +import aiofiles +import aiohttp +import aiorwlock +import youtube_dl from bot import download_dict_lock, download_dict, non_queued_dl, queue_dict_lock, config_dict from bot.helper.telegram_helper.message_utils import sendStatusMessage from ..status_utils.yt_dlp_download_status import YtDlpDownloadStatus from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus -from bot.helper.ext_utils.bot_utils import sync_to_async, async_to_sync -from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check, limit_checker - -LOGGER = getLogger(__name__) +from bot.helper.ext_utils.bot_utils import sync_to_async, async_to_sync, is_queued, stop_duplicate_check, limit_checker class MyLogger: def __init__(self, obj): self.obj = obj - def debug(self, msg): + def debug(self, msg: str) -> None: # Hack to fix changing extension if not self.obj.is_playlist: - if match := re_search(r'.Merger..Merging formats into..(.*?).$', msg) or \ - re_search(r'.ExtractAudio..Destination..(.*?)$', msg): + if match := re.search(r'.Merger..Merging formats into..(.*?).$', msg) or \ + re.search(r'.ExtractAudio..Destination..(.*?)$', msg): LOGGER.info(msg) newname = match.group(1) newname = newname.rsplit("/", 1)[-1] self.obj.name = newname @staticmethod - def warning(msg): + def warning(msg: str) -> None: LOGGER.warning(msg) @staticmethod - def error(msg): - if msg != "ERROR: Cancelling...": - LOGGER.error(msg) - - -class YoutubeDLHelper: - def __init__(self, listener): - self.__last_downloaded = 0 - self.__size = 0 - self.__progress = 0 - self.__downloaded_bytes = 0 - self.__download_speed = 0 - self.__eta = '-' - self.__listener = listener - self.__gid = '' - self.__is_cancelled = False - self.__downloading = False - self.__ext = '' - self.name = '' - self.is_playlist = False - self.playlist_count = 0 - self.opts = {'progress_hooks': [self.__onDownloadProgress], - 'logger': MyLogger(self), - 'usenetrc': True, - 'cookiefile': 'cookies.txt', - 'allow_multiple_video_streams': True, - 'allow_multiple_audio_streams': True, - 'noprogress': True, - 'allow_playlist_files': True, - 'overwrites': True, - 'writethumbnail': True, - 'trim_file_name': 220, - 'retry_sleep_functions': {'http': lambda x: 2, - 'fragment': lambda x: 2, - 'file_access': lambda x: 2, - 'extractor': lambda x: 2}} - - @property - def download_speed(self): - return self.__download_speed - - @property - def downloaded_bytes(self): - return self.__downloaded_bytes - - @property - def size(self): - return self.__size - - @property - def progress(self): - return self.__progress - - @property - def eta(self): - return self.__eta - - def __onDownloadProgress(self, d): - self.__downloading = True - if self.__is_cancelled: - raise ValueError("Cancelling...") - if 
d['status'] == "finished": - if self.is_playlist: - self.__last_downloaded = 0 - elif d['status'] == "downloading": - self.__download_speed = d['speed'] - if self.is_playlist: - downloadedBytes = d['downloaded_bytes'] - chunk_size = downloadedBytes - self.__last_downloaded - self.__last_downloaded = downloadedBytes - self.__downloaded_bytes += chunk_size - else: - if d.get('total_bytes'): - self.__size = d['total_bytes'] - elif d.get('total_bytes_estimate'): - self.__size = d['total_bytes_estimate'] - self.__downloaded_bytes = d['downloaded_bytes'] - self.__eta = d.get('eta', '-') or '-' - try: - self.__progress = (self.__downloaded_bytes / self.__size) * 100 - except: - pass - - async def __onDownloadStart(self, from_queue=False): - async with download_dict_lock: - download_dict[self.__listener.uid] = YtDlpDownloadStatus( - self, self.__listener, self.__gid) - if not from_queue: - await self.__listener.onDownloadStart() - await sendStatusMessage(self.__listener.message) - - def __onDownloadError(self, error): - self.__is_cancelled = True - async_to_sync(self.__listener.onDownloadError, error) - - def extractMetaData(self, link, name): - if link.startswith(('rtmp', 'mms', 'rstp', 'rtmps')): - self.opts['external_downloader'] = 'ffmpeg' - with YoutubeDL(self.opts) as ydl: - try: - result = ydl.extract_info(link, download=False) - if result is None: - raise ValueError('Info result is None') - except Exception as e: - return self.__onDownloadError(str(e)) - if self.is_playlist: - self.playlist_count = result.get('playlist_count', 0) - if 'entries' in result: - self.name = name - for entry in result['entries']: - if not entry: - continue - elif 'filesize_approx' in entry: - self.__size += entry['filesize_approx'] - elif 'filesize' in entry: - self.__size += entry['filesize'] - if not name: - outtmpl_ = '%(series,playlist_title,channel)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d.%(ext)s' - name, ext = ospath.splitext( - ydl.prepare_filename(entry, outtmpl=outtmpl_)) - self.name = name - if not self.__ext: - self.__ext = ext - else: - outtmpl_ = '%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s' - realName = ydl.prepare_filename(result, outtmpl=outtmpl_) - ext = ospath.splitext(realName)[-1] - self.name = f"{name}{ext}" if name else realName - if not self.__ext: - self.__ext = ext - if result.get('filesize'): - self.__size = result['filesize'] - elif result.get('filesize_approx'): - self.__size = result['filesize_approx'] - - def __download(self, link, path): - try: - with YoutubeDL(self.opts) as ydl: - try: - ydl.download([link]) - except DownloadError as e: - if not self.__is_cancelled: - self.__onDownloadError(str(e)) - return - if self.is_playlist and (not ospath.exists(path) or len(listdir(path)) == 0): - self.__onDownloadError( - "No video available to download from this playlist. 
Check logs for more details") - return - if self.__is_cancelled: - raise ValueError - async_to_sync(self.__listener.onDownloadComplete) - except ValueError: - self.__onDownloadError("Download Stopped by User!") - - async def add_download(self, link, path, name, qual, playlist, options): - if playlist: - self.opts['ignoreerrors'] = True - self.is_playlist = True - - self.__gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=10)) - - await self.__onDownloadStart() - - self.opts['postprocessors'] = [{'add_chapters': True, 'add_infojson': 'if_exists', 'add_metadata': True, 'key': 'FFmpegMetadata'}] - - if qual.startswith('ba/b-'): - audio_info = qual.split('-') - qual = audio_info[0] - audio_format = audio_info[1] - rate = audio_info[2] - self.opts['postprocessors'].append({'key': 'FFmpegExtractAudio', 'preferredcodec': audio_format, 'preferredquality': rate}) - if audio_format == 'vorbis': - self.__ext = '.ogg' - elif audio_format == 'alac': - self.__ext = '.m4a' - else: - self.__ext = f'.{audio_format}' - - self.opts['format'] = qual - - if options: - self.__set_options(options) - - await sync_to_async(self.extractMetaData, link, name) - if self.__is_cancelled: - return - - base_name, ext = ospath.splitext(self.name) - trim_name = self.name if self.is_playlist else base_name - if len(trim_name.encode()) > 200: - self.name = self.name[:200] if self.is_playlist else f'{base_name[:200]}{ext}' - base_name = ospath.splitext(self.name)[0] - - if self.is_playlist: - self.opts['outtmpl'] = {'default': f"{path}/{self.name}/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", - 'thumbnail': f"{path}/yt-dlp-thumb/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s"} - elif any(key in options for key in ['writedescription', 'writeinfojson', 'writeannotations', 'writedesktoplink', 'writewebloclink', 'writeurllink', 'writesubtitles', 'writeautomaticsub']): - self.opts['outtmpl'] = {'default': f"{path}/{base_name}/{self.name}", - 'thumbnail': f"{path}/yt-dlp-thumb/{base_name}.%(ext)s"} - else: - self.opts['outtmpl'] = {'default': f"{path}/{self.name}", - 'thumbnail': f"{path}/yt-dlp-thumb/{base_name}.%(ext)s"} - self.name = base_name - - if self.__listener.isLeech: - self.opts['postprocessors'].append( - {'format': 'jpg', 'key': 'FFmpegThumbnailsConvertor', 'when': 'before_dl'}) - if self.__ext in ['.mp3', '.mkv', '.mka', '.ogg', '.opus', '.flac', '.m4a', '.mp4', '.mov']: - self.opts['postprocessors'].append( - {'already_have_thumbnail': self.__listener.isLeech, 'key': 'EmbedThumbnail'}) - elif not self.__listener.isLeech: - self.opts['writethumbnail'] = False - - msg, button = await stop_duplicate_check(name, self.__listener) - if msg: - await self.__listener.onDownloadError(msg, button) - return - if limit_exceeded := await limit_checker(self.__size, self.__listener, isYtdlp=True, isPlayList=self.playlist_count): - await self.__listener.onDownloadError(limit_exceeded) - return - added_to_queue, event = await is_queued(self.__listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {self.name}") - async with download_dict_lock: - download_dict[self.__listener.uid] = QueueStatus( - self.name, self.__size, self.__gid, self.__listener, 'dl') - await 
event.wait() - async with download_dict_lock: - if self.__listener.uid not in download_dict: - return - LOGGER.info(f'Start Queued Download from YT_DLP: {self.name}') - await self.__onDownloadStart(True) - else: - LOGGER.info(f'Download with YT_DLP: {self.name}') - - async with queue_dict_lock: - non_queued_dl.add(self.__listener.uid) - - await sync_to_async(self.__download, link, path) - - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Download: {self.name}") - if not self.__downloading: - await self.__listener.onDownloadError("Download Cancelled by User!") - - def __set_options(self, options): - options = options.split('|') - for opt in options: - key, value = map(str.strip, opt.split(':', 1)) - if value.startswith('^'): - if '.' in value or value == '^inf': - value = float(value.split('^')[1]) - else: - value = int(value.split('^')[1]) - elif value.lower() == 'true': - value = True - elif value.lower() == 'false': - value = False - elif value.startswith(('{', '[', '(')) and value.endswith(('}', ']', ')')): - value = eval(value) - if key == 'postprocessors': - if isinstance(value, list): - self.opts[key].extend(tuple(value)) - elif isinstance(value, dict): - self.opts[key].append(value) - else: - self.opts[key] = value \ No newline at end of file diff --git a/bot/helper/mirror_utils/rclone_utils/list.py b/bot/helper/mirror_utils/rclone_utils/list.py index 15c681b5da..adb265b80d 100644 --- a/bot/helper/mirror_utils/rclone_utils/list.py +++ b/bot/helper/mirror_utils/rclone_utils/list.py @@ -1,98 +1,40 @@ #!/usr/bin/env python3 -from asyncio import wait_for, Event, wrap_future -from aiofiles.os import path as aiopath -from aiofiles import open as aiopen -from configparser import ConfigParser -from pyrogram.handlers import CallbackQueryHandler -from pyrogram.filters import regex, user from functools import partial from json import loads from time import time +import asyncio +import configparser +from pathlib import Path +from typing import List, Optional + +import aiofiles +import pyrogram +from pyrogram.filters import regex, user +from pyrogram.handlers import CallbackQueryHandler from bot import LOGGER, config_dict from bot.helper.ext_utils.db_handler import DbManger from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage +from bot.helper.telegram_helper.message_utils import ( + sendMessage as send_message, + editMessage as edit_message, + deleteMessage as delete_message, +) -from bot.helper.ext_utils.bot_utils import cmd_exec, new_thread, get_readable_file_size, new_task, get_readable_time +from bot.helper.ext_utils.bot_utils import ( + cmd_exec, + new_thread, + get_readable_file_size, + new_task, + get_readable_time, +) LIST_LIMIT = 6 @new_task async def path_updates(client, query, obj): await query.answer() message = query.message data = query.data.split() if data[1] == 'cancel': obj.remote = 'Task has been cancelled!' obj.path = '' obj.is_cancelled = True obj.event.set() await delete_message(message) return if obj.query_proc: return obj.query_proc = True if data[1] == 'pre': obj.iter_start -= LIST_LIMIT * obj.page_step await obj.get_path_buttons() elif data[1] == 'nex': obj.iter_start += LIST_LIMIT * obj.page_step await obj.get_path_buttons() elif data[1] == 'back': if data[2] == 're': await obj.list_config() else: await obj.back_from_path() elif data[1] == 're': # some remotes have spaces data = query.data.split(maxsplit=2) obj.remote = data[2] await obj.get_path() elif data[1] == 'pa': index = int(data[3]) obj.path += f"/{obj.path_list[index]['Path']}" if obj.path else obj.path_list[index]['Path'] if data[2] == 'fo': await obj.get_path() else: await delete_message(message) obj.event.set() elif data[1] == 'ps': if obj.page_step == int(data[2]): return obj.page_step = int(data[2]) await obj.get_path_buttons() elif data[1] == 'root': obj.path = '' await obj.get_path() elif data[1] == 'itype': obj.item_type = data[2] await obj.get_path() elif data[1] == 'cur': await delete_message(message) obj.event.set() elif data[1] == 'def': path = f'{obj.remote}{obj.path}' if obj.config_path == 'rclone.conf' else f'mrcc:{obj.remote}{obj.path}' if path != config_dict['RCLONE_PATH']: config_dict['RCLONE_PATH'] = path await obj.get_path_buttons() if config_dict['DATABASE_URL']: await DbManger().update_config({'RCLONE_PATH': path}) elif data[1] == 'owner': obj.config_path = 'rclone.conf' obj.path = '' obj.remote = '' await obj.list_remotes() elif data[1] == 'user': obj.config_path = obj.user_rcc_path obj.path = '' obj.remote = '' await obj.list_remotes() obj.query_proc = False class RcloneList: - def __init__(self, client, message): + def __init__( + self, + client: pyrogram.Client, + message: pyrogram.Message, + ): self.__user_id = message.from_user.id self.__rc_user = False self.__rc_owner = False @@ -102,15 +44,15 @@ def __init__(self, client, message): self.__reply_to = None self.__time = time() self.__timeout = 240 - self.remote = '' + self.remote = "" self.is_cancelled = False self.query_proc = False - self.item_type = '--dirs-only' - self.event = Event() - self.user_rcc_path = f'rclone/{self.__user_id}.conf' - self.config_path = '' - self.path = '' - self.list_status = '' + self.item_type = "--dirs-only" + self.event = asyncio.Event() + self.user_rcc_path = f"rclone/{self.__user_id}.conf" + self.config_path = "" + self.path = "" + self.list_status = "" self.path_list = [] self.iter_start = 0 self.page_step = 1 @@ -118,24 +60,26 @@ def __init__(self, client, message): @new_thread async def __event_handler(self): pfunc = partial(path_updates, obj=self) - handler = self.__client.add_handler(CallbackQueryHandler( - pfunc, filters=regex('^rcq') & user(self.__user_id)), group=-1) + handler = self.__client.add_handler( + CallbackQueryHandler(pfunc, filters=regex("^rcq") & user(self.__user_id)), + group=-1, + ) try: - await wait_for(self.event.wait(), timeout=self.__timeout) - except: - self.path = '' - self.remote = 'Timed Out. Task has been cancelled!' + await asyncio.wait_for(self.event.wait(), timeout=self.__timeout) + except asyncio.TimeoutError: + self.path = "" + self.remote = "Timed Out. Task has been cancelled!"
self.is_cancelled = True self.event.set() finally: self.__client.remove_handler(*handler) - async def __send_list_message(self, msg, button): + async def __send_list_message(self, msg: str, button: List[List[str]]) -> None: if not self.is_cancelled: if self.__reply_to is None: - self.__reply_to = await sendMessage(self.__message, msg, button) + self.__reply_to = await send_message(self.__message, msg, button) else: - await editMessage(self.__reply_to, msg, button) + await edit_message(self.__reply_to, msg, button) async def get_path_buttons(self): items_no = len(self.path_list) @@ -144,45 +88,45 @@ async def get_path_buttons(self): self.iter_start = 0 elif self.iter_start < 0 or self.iter_start > items_no: self.iter_start = LIST_LIMIT * (pages - 1) - page = (self.iter_start/LIST_LIMIT) + 1 if self.iter_start != 0 else 1 + page = (self.iter_start // LIST_LIMIT) + 1 if self.iter_start != 0 else 1 buttons = ButtonMaker() - for index, idict in enumerate(self.path_list[self.iter_start:LIST_LIMIT+self.iter_start]): + for index, idict in enumerate( + self.path_list[self.iter_start : LIST_LIMIT + self.iter_start] + ): orig_index = index + self.iter_start - if idict['IsDir']: - ptype = 'fo' - name = idict['Path'] + if idict["IsDir"]: + ptype = "fo" + name = idict["Path"] else: - ptype = 'fi' + ptype = "fi" name = f"[{get_readable_file_size(idict['Size'])}] {idict['Path']}" - buttons.ibutton(name, f'rcq pa {ptype} {orig_index}') + buttons.ibutton(name, f"rcq pa {ptype} {orig_index}") if items_no > LIST_LIMIT: for i in [1, 2, 4, 6, 10, 30, 50, 100]: - buttons.ibutton(i, f'rcq ps {i}', position='header') - buttons.ibutton('Previous', 'rcq pre', position='footer') - buttons.ibutton('Next', 'rcq nex', position='footer') - if self.list_status == 'rcd': - if self.item_type == '--dirs-only': - buttons.ibutton( - 'Files', 'rcq itype --files-only', position='footer') + buttons.ibutton(str(i), f"rcq ps {i}", position="header") + buttons.ibutton("Previous", "rcq pre", position="footer") + buttons.ibutton("Next", "rcq nex", position="footer") + if self.list_status == "rcd": + if self.item_type == "--dirs-only": + buttons.ibutton("Files", "rcq itype --files-only", position="footer") else: - buttons.ibutton( - 'Folders', 'rcq itype --dirs-only', position='footer') - if self.list_status == 'rcu' or len(self.path_list) > 0: - buttons.ibutton('Choose Current Path', - 'rcq cur', position='footer') - if self.list_status == 'rcu': - buttons.ibutton('Set as Default Path', - 'rcq def', position='footer') + buttons.ibutton("Folders", "rcq itype --dirs-only", position="footer") + if self.list_status == "rcu" or len(self.path_list) > 0: + buttons.ibutton("Choose Current Path", "rcq cur", position="footer") + if self.list_status == "rcu": + buttons.ibutton("Set as Default Path", "rcq def", position="footer") if self.path or len(self.__sections) > 1 or self.__rc_user and self.__rc_owner: - buttons.ibutton('Back', 'rcq back pa', position='footer') + buttons.ibutton("Back", "rcq back pa", position="footer") if self.path: - buttons.ibutton('Back To Root', 'rcq root', position='footer') - buttons.ibutton('Cancel', 'rcq cancel', position='footer') + buttons.ibutton("Back To Root", "rcq root", position="footer") + buttons.ibutton("Cancel", "rcq cancel", position="footer") button = buttons.build_menu(f_cols=2) - msg = 'Choose Path:' + ('\nTransfer Type: Download' if self.list_status == - 'rcd' else '\nTransfer Type: Upload') - if self.list_status == 'rcu': - default_path = config_dict['RCLONE_PATH'] + msg = ( + "Choose Path:" + + 
('\nTransfer Type: Download' if self.list_status == "rcd" else '\nTransfer Type: Upload') + ) + if self.list_status == "rcu": + default_path = config_dict["RCLONE_PATH"] msg += f"\nDefault Rclone Path: {default_path}" if default_path else '' msg += f'\n\nItems: {items_no}' if items_no > LIST_LIMIT: @@ -192,13 +136,22 @@ async def get_path_buttons(self): msg += f'\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' await self.__send_list_message(msg, button) - async def get_path(self, itype=''): + async def get_path(self, itype: Optional[str] = None): if itype: - self.item_type == itype - elif self.list_status == 'rcu': - self.item_type == '--dirs-only' - cmd = ['rclone', 'lsjson', self.item_type, '--fast-list', '--no-mimetype', - '--no-modtime', '--config', self.config_path, f"{self.remote}{self.path}"] + self.item_type = itype + elif self.list_status == "rcu": + self.item_type = "--dirs-only" + cmd = [ + "rclone", + "lsjson", + self.item_type, + "--fast-list", + "--no-mimetype", + "--no-modtime", + "--config", + self.config_path, + f"{self.remote}{self.path}", + ] if self.is_cancelled: return res, err, code = await cmd_exec(cmd) @@ -206,12 +159,12 @@ async def get_path(self, itype=''): LOGGER.error( f'While rclone listing. Path: {self.remote}{self.path}. Stderr: {err}') self.remote = err[:4000] - self.path = '' + self.path = "" self.event.set() return result = loads(res) - if len(result) == 0 and itype != self.item_type and self.list_status == 'rcd': - itype = '--dirs-only' if self.item_type == '--files-only' else '--files-only' + if len(result) == 0 and itype != self.item_type and self.list_status == "rcd": + itype = "--dirs-only" if self.item_type == "--files-only" else "--files-only" self.item_type = itype return await self.get_path(itype) self.path_list = sorted(result, key=lambda x: x["Path"]) @@ -219,72 +172,85 @@ async def get_path(self, itype=''): await self.get_path_buttons() async def list_remotes(self): - config = ConfigParser() - async with aiopen(self.config_path, 'r') as f: - contents = await f.read() - config.read_string(contents) - if config.has_section('combine'): - config.remove_section('combine') + config = configparser.ConfigParser() + async with aiofiles.open(self.config_path, "r") as f: + try: + contents = await f.read() + config.read_string(contents) + except Exception as e: + await delete_message(self.__reply_to) + await send_message( + self.__message, + f"Error reading config file: {str(e)}", + ) + return + if config.has_section("combine"): + config.remove_section("combine") self.__sections = config.sections() if len(self.__sections) == 1: - self.remote = f'{self.__sections[0]}:' + self.remote = f"{self.__sections[0]}:" await self.get_path() else: - msg = 'Choose Rclone remote:' + \ - ('\nTransfer Type: Download' if self.list_status == - 'rcd' else '\nTransfer Type: Upload') + msg = ( + 'Choose Rclone remote:' + + ('\nTransfer Type: Download' if self.list_status == "rcd" else '\nTransfer Type: Upload') + ) msg += f'\nConfig Path: {self.config_path}' msg += f'\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' buttons = ButtonMaker() for remote in self.__sections: - buttons.ibutton(remote, f'rcq re {remote}:') + buttons.ibutton(remote, f"rcq re {remote}:") if self.__rc_user and self.__rc_owner: - buttons.ibutton('Back', 'rcq back re', position='footer') - buttons.ibutton('Cancel', 'rcq cancel', position='footer') + buttons.ibutton("Back", "rcq back re", position="footer") + buttons.ibutton("Cancel", "rcq cancel", position="footer") 
button = buttons.build_menu(2) await self.__send_list_message(msg, button) async def list_config(self): if self.__rc_user and self.__rc_owner: - msg = 'Choose Rclone config:' + \ - ('\nTransfer Type: Download' if self.list_status == - 'rcd' else '\nTransfer Type: Upload') + msg = ( + 'Choose Rclone config:' + + ('\nTransfer Type: Download' if self.list_status == "rcd" else '\nTransfer Type: Upload') + ) msg += f'\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' buttons = ButtonMaker() - buttons.ibutton('Owner Config', 'rcq owner') - buttons.ibutton('My Config', 'rcq user') - buttons.ibutton('Cancel', 'rcq cancel') + buttons.ibutton("Owner Config", "rcq owner") + buttons.ibutton("My Config", "rcq user") + buttons.ibutton("Cancel", "rcq cancel") button = buttons.build_menu(2) await self.__send_list_message(msg, button) else: - self.config_path = 'rclone.conf' if self.__rc_owner else self.user_rcc_path + self.config_path = "rclone.conf" if self.__rc_owner else self.user_rcc_path await self.list_remotes() async def back_from_path(self): if self.path: - path = self.path.rsplit('/', 1) - self.path = path[0] if len(path) > 1 else '' + path = self.path.rsplit("/", 1) + self.path = path[0] if len(path) > 1 else "" await self.get_path() elif len(self.__sections) > 1: await self.list_remotes() else: await self.list_config() - async def get_rclone_path(self, status, config_path=None): + async def get_rclone_path( + self, status: str, config_path: Optional[str] = None + ) -> Optional[str]: self.list_status = status future = self.__event_handler() if config_path is None: - self.__rc_user = await aiopath.exists(self.user_rcc_path) - self.__rc_owner = await aiopath.exists('rclone.conf') + self.__rc_user = Path(self.user_rcc_path).exists() + self.__rc_owner = Path("rclone.conf").exists() if not self.__rc_owner and not self.__rc_user: self.event.set() - return 'Rclone Config not Exists!' + return "Rclone Config not Exists!" await self.list_config() else: self.config_path = config_path await self.list_remotes() - await wrap_future(future) - await deleteMessage(self.__reply_to) + await asyncio.wrap_future(future) + await delete_message(self.__reply_to) - if self.config_path != 'rclone.conf' and not self.is_cancelled: - return f'mrcc:{self.remote}{self.path}' - return f'{self.remote}{self.path}' + if self.config_path != "rclone.conf" and not self.is_cancelled: + return f"mrcc:{self.remote}{self.path}" + return f"{self.remote}{self.path}"
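get_rclone_path above parks the coroutine on an Event until a button callback (or the timeout) fires; the core shape of that wait, runnable on its own:

import asyncio

async def demo():
    ev = asyncio.Event()
    try:
        await asyncio.wait_for(ev.wait(), timeout=0.1)  # ev.set() from a callback would end the wait early
    except asyncio.TimeoutError:
        print('Timed Out. Task has been cancelled!')

asyncio.run(demo())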
+ """ if not config_dict['RCLONE_SERVE_URL'] or not await aiopath.exists('rclone.conf'): if RcloneServe: - try: - RcloneServe[0].kill() - RcloneServe.clear() - except: - pass + for proc in RcloneServe: + try: + proc.kill() + except ProcessLookupError: + pass + RcloneServe.clear() return config = ConfigParser() async with aiopen('rclone.conf', 'r') as f: @@ -30,18 +33,26 @@ async def rclone_serve_booter(): with open('rclone.conf', 'w') as f: config.write(f, space_around_delimiters=False) if RcloneServe: - try: - RcloneServe[0].kill() - RcloneServe.clear() - except: - pass + for proc in RcloneServe: + try: + proc.kill() + except ProcessLookupError: + pass + RcloneServe.clear() + if not await aiopath.exists('/usr/bin/rclone'): + raise FileNotFoundError('rclone not found in /usr/bin/') + if not config_dict['RCLONE_SERVE_PORT'].isdigit() or int(config_dict['RCLONE_SERVE_PORT']) <= 0 or int(config_dict['RCLONE_SERVE_PORT']) > 65535: + raise ValueError('Invalid RCLONE_SERVE_PORT value') cmd = ["rclone", "serve", "http", "--config", "rclone.conf", "--no-modtime", "combine:", "--addr", f":{config_dict['RCLONE_SERVE_PORT']}", "--vfs-cache-mode", "full", "--vfs-cache-max-age", "1m0s", "--buffer-size", "64M"] if (user := config_dict['RCLONE_SERVE_USER']) and (pswd := config_dict['RCLONE_SERVE_PASS']): cmd.extend(("--user", user, "--pass", pswd)) - rcs = await create_subprocess_exec(*cmd) + try: + rcs = await create_subprocess_exec(*cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, limit=1024*1024) + except TimeoutError: + raise TimeoutError('Subprocess creation timed out') RcloneServe.append(rcs) bot_loop.run_until_complete(rclone_serve_booter()) diff --git a/bot/helper/mirror_utils/rclone_utils/transfer.py b/bot/helper/mirror_utils/rclone_utils/transfer.py index 5762a08014..4d53cd5f38 100644 --- a/bot/helper/mirror_utils/rclone_utils/transfer.py +++ b/bot/helper/mirror_utils/rclone_utils/transfer.py @@ -1,86 +1,105 @@ -from asyncio import create_subprocess_exec, gather +import asyncio +import os +import re +import json +from typing import List, Tuple, Dict, Any, Union, Optional from asyncio.subprocess import PIPE -from re import findall as re_findall -from json import loads -from aiofiles.os import path as aiopath, mkdir, listdir -from aiofiles import open as aiopen -from configparser import ConfigParser -from random import randrange -from logging import getLogger - -from bot import config_dict, GLOBAL_EXTENSION_FILTER -from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async +import aiofiles +import aiofiles.os +import configparser +import logging +import shutil +import tarfile +from bot.helper.ext_utils.bot_utils import cmd_exec from bot.helper.ext_utils.fs_utils import get_mime_type, count_files_and_folders - -LOGGER = getLogger(__name__) - +logger = logging.getLogger(__name__) class RcloneTransferHelper: - def __init__(self, listener=None, name=''): - self.__listener = listener - self.__proc = None - self.__transferred_size = '0 B' - self.__eta = '-' - self.__percentage = '0%' - self.__speed = '0 B/s' - self.__size = '0 B' - self.__is_cancelled = False - self.__is_download = False - self.__is_upload = False - self.__sa_count = 1 - self.__sa_index = 0 - self.__sa_number = 0 + def __init__(self, listener: Any, name: str): + self.listener = listener self.name = name + self.proc = None + self.transferred_size = "0 B" + self.eta = "-" + self.percentage = "0%" + self.speed = "0 B/s" + self.size = "0 B" + self.is_cancelled = False + self.is_download = False + self.is_upload 
diff --git a/bot/helper/mirror_utils/rclone_utils/transfer.py b/bot/helper/mirror_utils/rclone_utils/transfer.py
index 5762a08014..4d53cd5f38 100644
--- a/bot/helper/mirror_utils/rclone_utils/transfer.py
+++ b/bot/helper/mirror_utils/rclone_utils/transfer.py
@@ -1,86 +1,105 @@
-from asyncio import create_subprocess_exec, gather
+import asyncio
+import re
+import json
+from typing import List, Tuple, Dict, Any, Union, Optional
 from asyncio.subprocess import PIPE
-from re import findall as re_findall
-from json import loads
-from aiofiles.os import path as aiopath, mkdir, listdir
-from aiofiles import open as aiopen
-from configparser import ConfigParser
-from logging import getLogger
+import aiofiles
+import aiofiles.os
+import configparser
+import logging
 from random import randrange

 from bot import config_dict, GLOBAL_EXTENSION_FILTER
-from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async
+from bot.helper.ext_utils.bot_utils import cmd_exec
 from bot.helper.ext_utils.fs_utils import get_mime_type, count_files_and_folders

-LOGGER = getLogger(__name__)
+logger = logging.getLogger(__name__)

 class RcloneTransferHelper:
-    def __init__(self, listener=None, name=''):
-        self.__listener = listener
-        self.__proc = None
-        self.__transferred_size = '0 B'
-        self.__eta = '-'
-        self.__percentage = '0%'
-        self.__speed = '0 B/s'
-        self.__size = '0 B'
-        self.__is_cancelled = False
-        self.__is_download = False
-        self.__is_upload = False
-        self.__sa_count = 1
-        self.__sa_index = 0
-        self.__sa_number = 0
+    def __init__(self, listener: Any, name: str):
+        self.listener = listener
         self.name = name
+        self.proc = None
+        self.transferred_size = "0 B"
+        self.eta = "-"
+        self.percentage = "0%"
+        self.speed = "0 B/s"
+        self.size = "0 B"
+        self.is_cancelled = False
+        self.is_download = False
+        self.is_upload = False
+        self.sa_count = 1
+        self.sa_index = 0
+        self.sa_number = 0

     @property
     def transferred_size(self):
         return self.__transferred_size

+    @transferred_size.setter
+    def transferred_size(self, value: str):
+        self.__transferred_size = value
+
     @property
     def percentage(self):
         return self.__percentage

+    @percentage.setter
+    def percentage(self, value: str):
+        self.__percentage = value
+
     @property
     def speed(self):
         return self.__speed

+    @speed.setter
+    def speed(self, value: str):
+        self.__speed = value
+
     @property
     def eta(self):
         return self.__eta

+    @eta.setter
+    def eta(self, value: str):
+        self.__eta = value
+
     @property
     def size(self):
         return self.__size

+    @size.setter
+    def size(self, value: str):
+        self.__size = value
+
     async def __progress(self):
-        while not (self.__proc is None or self.__is_cancelled):
+        while not (self.proc is None or self.is_cancelled):
             try:
-                data = (await self.__proc.stdout.readline()).decode()
-            except:
+                data = (await self.proc.stdout.readline()).decode()
+            except Exception as e:
+                logger.error(f"Error reading progress: {e}")
                 continue
             if not data:
                 break
-            if data := re_findall(r'Transferred:\s+([\d.]+\s*\w+)\s+/\s+([\d.]+\s*\w+),\s+([\d.]+%)\s*,\s+([\d.]+\s*\w+/s),\s+ETA\s+([\dwdhms]+)', data):
-                self.__transferred_size, self.__size, self.__percentage, self.__speed, self.__eta = data[
-                    0]
+            if match := re.findall(r'Transferred:\s+([\d.]+\s*\w+)\s+/\s+([\d.]+\s*\w+),\s+([\d.]+%)\s*,\s+([\d.]+\s*\w+/s),\s+ETA\s+([\dwdhms]+)', data):
+                self.transferred_size, self.size, self.percentage, self.speed, self.eta = match[0]

-    def __switchServiceAccount(self):
-        if self.__sa_index == self.__sa_number - 1:
-            self.__sa_index = 0
+    def __switch_service_account(self):
+        if self.sa_index == self.sa_number - 1:
+            self.sa_index = 0
         else:
-            self.__sa_index += 1
-        self.__sa_count += 1
-        remote = f'sa{self.__sa_index:03}'
-        LOGGER.info(f"Switching to {remote} remote")
+            self.sa_index += 1
+        self.sa_count += 1
+        remote = f'sa{self.sa_index:03}'
+        logger.info(f"Switching to {remote} remote")
         return remote

-    async def __create_rc_sa(self, remote, remote_opts):
+    async def __create_rc_sa(self, remote: str, remote_opts: Dict[str, str]) -> str:
         sa_conf_dir = 'rclone_sa'
         sa_conf_file = f'{sa_conf_dir}/{remote}.conf'
-        if not await aiopath.isdir(sa_conf_dir):
-            await mkdir(sa_conf_dir)
-        elif await aiopath.isfile(sa_conf_file):
+        if not await aiofiles.os.path.isdir(sa_conf_dir):
+            await aiofiles.os.mkdir(sa_conf_dir)
+        elif await aiofiles.os.path.isfile(sa_conf_file):
             return sa_conf_file

         if gd_id := remote_opts.get('team_drive'):
@@ -90,73 +109,81 @@ async def __create_rc_sa(self, remote, remote_opts):
         else:
             return 'rclone.conf'

-        files = await listdir('accounts')
+        files = await aiofiles.os.listdir('accounts')
         text = ''.join(f"[sa{i:03}]\ntype = drive\nscope = drive\nservice_account_file = accounts/{sa}\n{option} = {gd_id}\n\n"
                        for i, sa in enumerate(files))
-        async with aiopen(sa_conf_file, 'w') as f:
+        async with aiofiles.open(sa_conf_file, 'w') as f:
             await f.write(text)
         return sa_conf_file

-    async def __start_download(self, cmd, remote_type):
-        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-        _, return_code = await gather(self.__progress(), self.__proc.wait())
+    async def __start_download(self, cmd: List[str], remote_type: str):
+        self.proc = await asyncio.create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
+        await self.__progress()
+        if self.is_cancelled:
+            return

-        if self.__is_cancelled:
+        stdout, stderr = await self.proc.communicate()
+        if stdout:
logger.debug(f"Download stdout: {stdout.decode()}") + if stderr: + logger.debug(f"Download stderr: {stderr.decode()}") + + if self.is_cancelled: return - if return_code == 0: - await self.__listener.onDownloadComplete() - elif return_code != -9: - error = (await self.__proc.stderr.read()).decode().strip() + if self.proc.returncode == 0: + await self.listener.on_download_complete() + elif self.proc.returncode != -9: + error = stderr.decode().strip() if not error and remote_type == 'drive' and config_dict['USE_SERVICE_ACCOUNTS']: error = "Mostly your service accounts don't have access to this drive!" - LOGGER.error(error) + logger.error(error) - if self.__sa_number != 0 and remote_type == 'drive' and 'RATE_LIMIT_EXCEEDED' in error and config_dict['USE_SERVICE_ACCOUNTS']: - if self.__sa_count < self.__sa_number: - remote = self.__switchServiceAccount() + if self.sa_number != 0 and remote_type == 'drive' and 'RATE_LIMIT_EXCEEDED' in error and config_dict['USE_SERVICE_ACCOUNTS']: + if self.sa_count < self.sa_number: + remote = self.__switch_service_account() cmd[6] = f"{remote}:{cmd[6].split(':', 1)[1]}" - if self.__is_cancelled: + if self.is_cancelled: return return await self.__start_download(cmd, remote_type) else: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}") + logger.info( + f"Reached maximum number of service accounts switching, which is {self.sa_count}") - await self.__listener.onDownloadError(error[:4000]) + await self.listener.on_download_error(error[:4000]) - async def download(self, remote, rc_path, config_path, path): - self.__is_download = True + async def download(self, remote: str, rc_path: str, config_path: str, path: str): + self.is_download = True try: remote_opts = await self.__get_remote_options(config_path, remote) except Exception as err: - await self.__listener.onDownloadError(str(err)) + await self.listener.on_download_error(str(err)) return remote_type = remote_opts['type'] if remote_type == 'drive' and config_dict['USE_SERVICE_ACCOUNTS'] and config_path == 'rclone.conf' \ - and await aiopath.isdir('accounts') and not remote_opts.get('service_account_file'): + and await aiofiles.os.path.isdir('accounts') and not remote_opts.get('service_account_file'): config_path = await self.__create_rc_sa(remote, remote_opts) if config_path != 'rclone.conf': - sa_files = await listdir('accounts') - self.__sa_number = len(sa_files) - self.__sa_index = randrange(self.__sa_number) - remote = f'sa{self.__sa_index:03}' - LOGGER.info(f'Download with service account {remote}') - - rcflags = self.__listener.rcFlags or config_dict['RCLONE_FLAGS'] - cmd = self.__getUpdatedCommand( + sa_files = await aiofiles.os.listdir('accounts') + self.sa_number = len(sa_files) + self.sa_index = randrange(self.sa_number) + remote = f'sa{self.sa_index:03}' + logger.info(f'Download with service account {remote}') + + rcflags = self.listener.rc_flags or config_dict['RCLONE_FLAGS'] + cmd = self.__get_updated_command( config_path, f'{remote}:{rc_path}', path, rcflags, 'copy') - if remote_type == 'drive' and not config_dict['RCLONE_FLAGS'] and not self.__listener.rcFlags: + if remote_type == 'drive' and not config_dict['RCLONE_FLAGS'] and not self.listener.rc_flags: cmd.append('--drive-acknowledge-abuse') elif remote_type != 'drive': cmd.extend(('--retries-sleep', '3s')) await self.__start_download(cmd, remote_type) - async def __get_gdrive_link(self, config_path, remote, rc_path, mime_type): + async def __get_gdrive_link(self, config_path: str, remote: str, 
rc_path: str, mime_type: str) -> Tuple[str, str]: if mime_type == 'Folder': epath = rc_path.strip('/').rsplit('/', 1) epath = f'{remote}:{epath[0]}' if len( @@ -164,229 +191,25 @@ async def __get_gdrive_link(self, config_path, remote, rc_path, mime_type): destination = f'{remote}:{rc_path}' elif rc_path: epath = f"{remote}:{rc_path}/{self.name}" - destination = epath + destination = f'{remote}:{rc_path}/{self.name}' else: epath = f"{remote}:{rc_path}{self.name}" - destination = epath + destination = f'{remote}:{rc_path}{self.name}' cmd = ['rclone', 'lsjson', '--fast-list', '--no-mimetype', '--no-modtime', '--config', config_path, epath] res, err, code = await cmd_exec(cmd) if code == 0: - result = loads(res) + result = json.loads(res) fid = next((r['ID'] for r in result if r['Path'] == self.name), 'err') link = f'https://drive.google.com/drive/folders/{fid}' if mime_type == 'Folder' else f'https://drive.google.com/uc?id={fid}&export=download' elif code != -9: - LOGGER.error( + logger.error( f'while getting drive link. Path: {destination}. Stderr: {err}') link = '' return link, destination - async def __start_upload(self, cmd, remote_type): - self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) - _, return_code = await gather(self.__progress(), self.__proc.wait()) - - if self.__is_cancelled: - return False - - if return_code == -9: - return False - elif return_code != 0: - error = (await self.__proc.stderr.read()).decode().strip() - if not error and remote_type == 'drive' and config_dict['USE_SERVICE_ACCOUNTS']: - error = "Mostly your service accounts don't have access to this drive!" - LOGGER.error(error) - if self.__sa_number != 0 and remote_type == 'drive' and 'RATE_LIMIT_EXCEEDED' in error and config_dict['USE_SERVICE_ACCOUNTS']: - if self.__sa_count < self.__sa_number: - remote = self.__switchServiceAccount() - cmd[7] = f"{remote}:{cmd[7].split(':', 1)[1]}" - return False if self.__is_cancelled else await self.__start_upload(cmd, remote_type) - else: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}") - await self.__listener.onUploadError(error[:4000]) - return False - else: - return True - - async def upload(self, path, size): - self.__is_upload = True - rc_path = self.__listener.upPath.strip('/') - if rc_path.startswith('mrcc:'): - rc_path = rc_path.split('mrcc:', 1)[1] - oconfig_path = f'rclone/{self.__listener.message.from_user.id}.conf' - else: - oconfig_path = 'rclone.conf' - - oremote, rc_path = rc_path.split(':', 1) - - if await aiopath.isdir(path): - mime_type = 'Folder' - folders, files = await count_files_and_folders(path) - rc_path += f"/{self.name}" if rc_path else self.name - else: - if path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - await self.__listener.onUploadError('This file extension is excluded by extension filter!') - return - mime_type = await sync_to_async(get_mime_type, path) - folders = 0 - files = 1 - - try: - remote_opts = await self.__get_remote_options(oconfig_path, oremote) - except Exception as err: - await self.__listener.onUploadError(str(err)) - return - remote_type = remote_opts['type'] - - fremote = oremote - fconfig_path = oconfig_path - if remote_type == 'drive' and config_dict['USE_SERVICE_ACCOUNTS'] and fconfig_path == 'rclone.conf' \ - and await aiopath.isdir('accounts') and not remote_opts.get('service_account_file'): - fconfig_path = await self.__create_rc_sa(oremote, remote_opts) - if fconfig_path != 'rclone.conf': - sa_files = await listdir('accounts') - 
self.__sa_number = len(sa_files)
-            self.__sa_index = randrange(self.__sa_number)
-            fremote = f'sa{self.__sa_index:03}'
-            LOGGER.info(f'Upload with service account {fremote}')
-
-        rcflags = self.__listener.rcFlags or config_dict['RCLONE_FLAGS']
-        method = 'move' if not self.__listener.seed or self.__listener.newDir else 'copy'
-        cmd = self.__getUpdatedCommand(
-            fconfig_path, path, f'{fremote}:{rc_path}', rcflags, method)
-        if remote_type == 'drive' and not config_dict['RCLONE_FLAGS'] and not self.__listener.rcFlags:
-            cmd.extend(('--drive-chunk-size', '64M',
-                        '--drive-upload-cutoff', '32M'))
-        elif remote_type != 'drive':
-            cmd.extend(('--retries-sleep', '3s'))
-
-        result = await self.__start_upload(cmd, remote_type)
-        if not result:
-            return
-
-        if remote_type == 'drive':
-            link, destination = await self.__get_gdrive_link(oconfig_path, oremote, rc_path, mime_type)
-        else:
-            if mime_type == 'Folder':
-                destination = f"{oremote}:{rc_path}"
-            elif rc_path:
-                destination = f"{oremote}:{rc_path}/{self.name}"
-            else:
-                destination = f"{oremote}:{self.name}"
-
-            cmd = ['rclone', 'link', '--config', oconfig_path, destination]
-            res, err, code = await cmd_exec(cmd)
-
-            if code == 0:
-                link = res
-            elif code != -9:
-                LOGGER.error(
-                    f'while getting link. Path: {destination} | Stderr: {err}')
-                link = ''
-        if self.__is_cancelled:
-            return
-        LOGGER.info(f'Upload Done. Path: {destination}')
-        await self.__listener.onUploadComplete(link, size, files, folders, mime_type, self.name, destination)
-
-    async def clone(self, config_path, src_remote, src_path, destination, rcflags, mime_type):
-        dst_remote, dst_path = destination.split(':', 1)
-
-        try:
-            src_remote_opts, dst_remote_opt = await gather(self.__get_remote_options(config_path, src_remote),
-                                                           self.__get_remote_options(config_path, dst_remote))
-        except Exception as err:
-            await self.__listener.onUploadError(str(err))
-            return None, None
-
-        src_remote_type, dst_remote_type = src_remote_opts['type'], dst_remote_opt['type']
-
-        cmd = self.__getUpdatedCommand(
-            config_path, f'{src_remote}:{src_path}', destination, rcflags, 'copy')
-        if not rcflags:
-            if src_remote_type == 'drive' and dst_remote_type != 'drive':
-                cmd.append('--drive-acknowledge-abuse')
-            elif dst_remote_type == 'drive' and src_remote_type != 'drive':
-                cmd.extend(('--drive-chunk-size', '64M',
-                            '--drive-upload-cutoff', '32M'))
-            elif src_remote_type == 'drive':
-                cmd.extend(('--tpslimit', '3', '--transfers', '3'))
-
-        self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
-        _, return_code = await gather(self.__progress(), self.__proc.wait())
-
-        if self.__is_cancelled:
-            return None, None
-
-        if return_code == -9:
-            return None, None
-        elif return_code != 0:
-            error = (await self.__proc.stderr.read()).decode().strip()
-            LOGGER.error(error)
-            await self.__listener.onUploadError(error[:4000])
-            return None, None
-        else:
-            if dst_remote_type == 'drive':
-                link, destination = await self.__get_gdrive_link(config_path, dst_remote, dst_path, mime_type)
-                return (None, None) if self.__is_cancelled else (link, destination)
-            else:
-                if mime_type != 'Folder':
-                    destination += f'/{self.name}' if dst_path else self.name
-
-                cmd = ['rclone', 'link', '--config', config_path, destination]
-                res, err, code = await cmd_exec(cmd)
-
-                if self.__is_cancelled:
-                    return None, None
-
-                if code == 0:
-                    return res, destination
-                elif code != -9:
-                    LOGGER.error(
-                        f'while getting link. Path: {destination} | Stderr: {err}')
-                    await self.__listener.onUploadError(err[:4000])
-                return None, None
-
-    @staticmethod
-    def __getUpdatedCommand(config_path, source, destination, rcflags, method):
-        ext = '*.{' + ','.join(GLOBAL_EXTENSION_FILTER) + '}'
-        cmd = ['rclone', method, '--fast-list', '--config', config_path, '-P', source, destination,
-               '--exclude', ext, '--ignore-case', '--low-level-retries', '1', '-M', '--log-file',
-               'rlog.txt', '--log-level', 'DEBUG']
-        if rcflags:
-            rcflags = rcflags.split('|')
-            for flag in rcflags:
-                if ":" in flag:
-                    key, value = map(str.strip, flag.split(':', 1))
-                    cmd.extend((key, value))
-                elif len(flag) > 0:
-                    cmd.append(flag.strip())
-        return cmd
+    @staticmethod
+    def __get_updated_command(config_path: str, source: str, destination: str, rcflags: str, method: str) -> List[str]:
+        ext = '*.{' + ','.join(GLOBAL_EXTENSION_FILTER) + '}'
+        cmd = ['rclone', method, '--fast-list', '--config', config_path, '-P', source, destination,
+               '--exclude', ext, '--ignore-case', '--low-level-retries', '1', '-M', '--log-file',
+               'rlog.txt', '--log-level', 'DEBUG']
+        if rcflags:
+            for flag in rcflags.split('|'):
+                if ':' in flag:
+                    key, value = map(str.strip, flag.split(':', 1))
+                    cmd.extend((key, value))
+                elif flag.strip():
+                    cmd.append(flag.strip())
+        return cmd

-    @staticmethod
-    async def __get_remote_options(config_path, remote):
-        config = ConfigParser()
-        async with aiopen(config_path, 'r') as f:
-            contents = await f.read()
-        config.read_string(contents)
-        options = config.options(remote)
-        return {opt: config.get(remote, opt) for opt in options}
+    @staticmethod
+    async def __get_remote_options(config_path: str, remote: str) -> Dict[str, str]:
+        config = configparser.ConfigParser()
+        async with aiofiles.open(config_path, 'r') as f:
+            config.read_string(await f.read())
+        return {opt: config.get(remote, opt) for opt in config.options(remote)}

-    async def cancel_download(self):
-        self.__is_cancelled = True
-        if self.__proc is not None:
-            try:
-                self.__proc.kill()
-            except:
-                pass
-        if self.__is_download:
-            LOGGER.info(f"Cancelling Download: {self.name}")
-            await self.__listener.onDownloadError('Download stopped by user!')
-        elif self.__is_upload:
-            LOGGER.info(f"Cancelling Upload: {self.name}")
-            await self.__listener.onUploadError('your upload has been stopped!')
-        else:
-            LOGGER.info(f"Cancelling Clone: {self.name}")
-            await self.__listener.onUploadError('your clone has been stopped!')
+    async def cancel_download(self):
+        self.is_cancelled = True
+        if self.proc is not None:
+            try:
+                self.proc.kill()
+            except ProcessLookupError:
+                pass
+        if self.is_download:
+            logger.info(f"Cancelling Download: {self.name}")
+            await self.listener.on_download_error('Download stopped by user!')
+        elif self.is_upload:
+            logger.info(f"Cancelling Upload: {self.name}")
+            await self.listener.on_upload_error('Your upload has been stopped!')
+        else:
+            logger.info(f"Cancelling Clone: {self.name}")
+            await self.listener.on_upload_error('Your clone has been stopped!')
diff --git a/bot/helper/mirror_utils/status_utils/ddl_status.py b/bot/helper/mirror_utils/status_utils/ddl_status.py
index eafa18ab5c..3c1d2aa025 100644
--- a/bot/helper/mirror_utils/status_utils/ddl_status.py
+++ b/bot/helper/mirror_utils/status_utils/ddl_status.py
@@ -4,45 +4,4 @@ class DDLStatus:
     def __init__(self, obj, size, message, gid, upload_details):
         self.__obj = obj
-        self.__size = size
-        self.__gid = gid
-        self.upload_details = upload_details
-        self.message = message
-    def processed_bytes(self):
-        return get_readable_file_size(self.__obj.processed_bytes)
-
-    def size(self):
-        return get_readable_file_size(self.__size)
-
-    def status(self):
-        return MirrorStatus.STATUS_UPLOADDDL
-
-    def name(self):
-        return self.__obj.name
-
-    def progress(self):
-        try:
-            progress_raw = self.__obj.processed_bytes / self.__size * 100
-        except:
-            progress_raw = 0
-        return f'{round(progress_raw, 2)}%'
-
-    def speed(self):
-        return f'{get_readable_file_size(self.__obj.speed)}/s'
-
-    def eta(self):
-        try:
-            seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed
-            return get_readable_time(seconds)
-        except:
-            return '-'
-
-    def gid(self) -> str:
-        return self.__gid
-
-    def download(self):
-        return self.__obj
-
-    def eng(self):
-        return self.__obj.engine
diff --git a/bot/helper/mirror_utils/status_utils/gdrive_status.py b/bot/helper/mirror_utils/status_utils/gdrive_status.py
index 5f2f164fda..84b62a553b 100644
--- a/bot/helper/mirror_utils/status_utils/gdrive_status.py
+++ b/bot/helper/mirror_utils/status_utils/gdrive_status.py
@@ -1,39 +1,36 @@
 #!/usr/bin/env python3
 from bot.helper.ext_utils.bot_utils import EngineStatus, MirrorStatus, get_readable_file_size, get_readable_time

-class GdriveStatus:
+class GDriveStatus:
     def __init__(self, obj, size, message, gid, status, upload_details):
-        self.__obj = obj
-        self.__size = size
-        self.__gid = gid
-        self.__status = status
-        self.upload_details = upload_details
+        self.obj = obj
+        self._size = size
         self.message = message
+        self._gid = gid
+        self._status = status
+        self.upload_details = upload_details

     def processed_bytes(self):
-        return get_readable_file_size(self.__obj.processed_bytes)
+        return get_readable_file_size(self.obj.processed_bytes)

     def size(self):
-        return get_readable_file_size(self.__size)
+        return get_readable_file_size(self._size)

     def status(self):
-        if self.__status == 'up':
-            return MirrorStatus.STATUS_UPLOADING
-        elif self.__status == 'dl':
-            return MirrorStatus.STATUS_DOWNLOADING
-        else:
-            return MirrorStatus.STATUS_CLONING
+        if self._status == 'up':
+            return MirrorStatus.STATUS_UPLOADING
+        elif self._status == 'dl':
+            return MirrorStatus.STATUS_DOWNLOADING
+        else:
+            return MirrorStatus.STATUS_CLONING

     def name(self):
-        return self.__obj.name
+        return self.obj.name

     def gid(self) -> str:
-        return self.__gid
+        return self._gid

     def progress_raw(self):
         try:
-            return self.__obj.processed_bytes / self.__size * 100
+            return self.obj.processed_bytes / self._size * 100
         except:
             return 0

@@ -41,18 +38,17 @@ def progress(self):
         return f'{round(self.progress_raw(), 2)}%'

     def speed(self):
-        return f'{get_readable_file_size(self.__obj.speed)}/s'
+        return f'{get_readable_file_size(self.obj.speed)}/s'

     def eta(self):
         try:
-            seconds = (self.__size - self.__obj.processed_bytes) / \
-                self.__obj.speed
+            seconds = (self._size - self.obj.processed_bytes) / self.obj.speed
             return get_readable_time(seconds)
         except:
             return '-'

     def download(self):
-        return self.__obj
+        return self.obj

     def eng(self):
-        return EngineStatus().STATUS_GD
\ No newline at end of file
+        return EngineStatus().STATUS_GD
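+
+if __name__ == '__main__':
+    # A self-contained sketch of the progress/ETA arithmetic used above,
+    # with illustrative numbers only (no bot objects involved):
+    processed, size, speed = 750 * 1024**2, 1024**3, 25 * 1024**2
+    print(f'{processed / size * 100:.2f}%')          # -> 73.24%
+    print(f'ETA {(size - processed) / speed:.2f}s')  # -> ETA 10.96s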
diff --git a/bot/helper/mirror_utils/status_utils/mega_download_status.py b/bot/helper/mirror_utils/status_utils/mega_download_status.py
index e0db93f18d..d0994daac3 100644
--- a/bot/helper/mirror_utils/status_utils/mega_download_status.py
+++ b/bot/helper/mirror_utils/status_utils/mega_download_status.py
@@ -1,10 +1,18 @@
 #!/usr/bin/env python3
 from bot.helper.ext_utils.bot_utils import EngineStatus, get_readable_file_size, MirrorStatus, get_readable_time

 class MegaDownloadStatus:
-    def __init__(self, name, size, gid, obj, message, upload_details):
+    def __init__(self, name: str, size: int, gid: str, obj, message, upload_details):
+        """
+        Initialize the MegaDownloadStatus class with the given parameters.
+
+        :param name: The name of the file being downloaded
+        :param size: The size of the file in bytes
+        :param gid: The globally unique identifier for the download
+        :param obj: The object containing the download details
+        :param message: The message object associated with the download
+        :param upload_details: The upload details of the file
+        """
         self.__obj = obj
         self.__name = name
         self.__size = size
@@ -12,43 +20,108 @@ def __init__(self, name, size, gid, obj, message, upload_details):
         self.message = message
         self.upload_details = upload_details

-    def name(self):
+    @property
+    def name(self) -> str:
+        """
+        Return the name of the file being downloaded.
+
+        :return: The name of the file
+        """
         return self.__name

-    def progress_raw(self):
+    @property
+    def progress_raw(self) -> float:
+        """
+        Return the progress of the download as a raw value between 0 and 100.
+
+        :return: The progress of the download
+        """
         try:
             return round(self.__obj.downloaded_bytes / self.__size * 100, 2)
         except:
             return 0.0

-    def progress(self):
-        return f"{self.progress_raw()}%"
+    @property
+    def progress(self) -> str:
+        """
+        Return the progress of the download as a formatted string with a percentage sign.
+
+        :return: The progress of the download as a formatted string
+        """
+        return f"{self.progress_raw}%"

-    def status(self):
+    @property
+    def status(self) -> str:
+        """
+        Return the status of the download as MirrorStatus.STATUS_DOWNLOADING.
+
+        :return: The status of the download
+        """
         return MirrorStatus.STATUS_DOWNLOADING

-    def processed_bytes(self):
+    @property
+    def processed_bytes(self) -> str:
+        """
+        Return the number of bytes downloaded as a human-readable string.
+
+        :return: The number of bytes downloaded as a human-readable string
+        """
         return get_readable_file_size(self.__obj.downloaded_bytes)

-    def eta(self):
+    @property
+    def eta(self) -> str:
+        """
+        Return the estimated time of arrival of the download as a human-readable string.
+
+        :return: The estimated time of arrival as a human-readable string
+        """
         try:
-            seconds = (self.__size - self.__obj.downloaded_bytes) / \
-                self.__obj.speed
+            seconds = (self.__size - self.__obj.downloaded_bytes) / self.__obj.speed
             return get_readable_time(seconds)
         except ZeroDivisionError:
             return '-'

-    def size(self):
+    @property
+    def size(self) -> str:
+        """
+        Return the size of the file as a human-readable string.
+
+        :return: The size of the file as a human-readable string
+        """
         return get_readable_file_size(self.__size)

-    def speed(self):
+    @property
+    def speed(self) -> str:
+        """
+        Return the speed of the download as a human-readable string.
+
+        :return: The speed of the download as a human-readable string
+        """
         return f'{get_readable_file_size(self.__obj.speed)}/s'

-    def gid(self):
+    @property
+    def gid(self) -> str:
+        """
+        Return the globally unique identifier for the download.
+
+        :return: The globally unique identifier for the download
+        """
         return self.__gid

-    def download(self):
+    @property
+    def download(self):
+        """
+        Return the object containing the download details.
+
+        :return: The object containing the download details
+        """
         return self.__obj

-    def eng(self):
-        return EngineStatus().STATUS_MEGA
\ No newline at end of file
+    @property
+    def eng(self):
+        """
+        Return the engine status as EngineStatus.STATUS_MEGA.
+
+        :return: The engine status
+        """
+        return EngineStatus().STATUS_MEGA
diff --git a/bot/helper/mirror_utils/status_utils/qbit_status.py b/bot/helper/mirror_utils/status_utils/qbit_status.py
index b9ae65837d..6293a39881 100644
--- a/bot/helper/mirror_utils/status_utils/qbit_status.py
+++ b/bot/helper/mirror_utils/status_utils/qbit_status.py
@@ -1,22 +1,22 @@
 #!/usr/bin/env python3
 from asyncio import sleep
+from typing import Optional

 from bot import LOGGER, get_client, QbTorrents, qb_listener_lock
 from bot.helper.ext_utils.bot_utils import EngineStatus, MirrorStatus, get_readable_file_size, get_readable_time, sync_to_async

-def get_download(client, tag):
+def get_download(client: object, tag: str) -> Optional[object]:
     try:
         return client.torrents_info(tag=tag)[0]
     except Exception as e:
-        LOGGER.error(
-            f'{e}: Qbittorrent, while getting torrent info. Tag: {tag}')
+        LOGGER.error(f'{e}: Qbittorrent, while getting torrent info. Tag: {tag}')
         return None

 class QbittorrentStatus:
+    """
+    Class to represent the status of a Qbittorrent download.
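+
+    Illustrative usage (a real listener object is required; the method
+    names follow this class):
+
+        status = QbittorrentStatus(listener)
+        status.name(), status.progress(), status.eta()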
+ """ - def __init__(self, listener, seeding=False, queued=False): + def __init__(self, listener, seeding: bool = False, queued: bool = False): self.__client = get_client() self.__listener = listener self.upload_details = listener.upload_details @@ -26,32 +26,62 @@ def __init__(self, listener, seeding=False, queued=False): self.message = listener.message def __update(self): + """ + Update the internal state of the object with the latest information from Qbittorrent. + """ new_info = get_download(self.__client, f'{self.__listener.uid}') if new_info is not None: self.__info = new_info + def __str__(self): + """ + Return a human-readable representation of the object. + """ + return f"QbittorrentStatus(name={self.name()}, status={self.status()}, progress={self.progress()})" + def progress(self): + """ + Return the progress of the download as a percentage. + """ return f'{round(self.__info.progress*100, 2)}%' def processed_bytes(self): + """ + Return the number of bytes that have been downloaded. + """ return get_readable_file_size(self.__info.downloaded) def speed(self): + """ + Return the download speed in a human-readable format. + """ return f"{get_readable_file_size(self.__info.dlspeed)}/s" def name(self): + """ + Return the name of the download. + """ if self.__info.state in ["metaDL", "checkingResumeData"]: return f"[METADATA]{self.__info.name}" else: return self.__info.name def size(self): + """ + Return the size of the download in a human-readable format. + """ return get_readable_file_size(self.__info.size) def eta(self): + """ + Return the estimated time of arrival in a human-readable format. + """ return get_readable_time(self.__info.eta) def status(self): + """ + Return the current status of the download as a MirrorStatus object. + """ self.__update() state = self.__info.state if state == "queuedDL" or self.queued: @@ -68,42 +98,83 @@ def status(self): return MirrorStatus.STATUS_DOWNLOADING def seeders_num(self): + """ + Return the number of seeders for the download. + """ return self.__info.num_seeds def leechers_num(self): + """ + Return the number of leechers for the download. + """ return self.__info.num_leechs def uploaded_bytes(self): + """ + Return the number of bytes that have been uploaded. + """ return get_readable_file_size(self.__info.uploaded) def upload_speed(self): + """ + Return the upload speed in a human-readable format. + """ return f"{get_readable_file_size(self.__info.upspeed)}/s" def ratio(self): + """ + Return the upload-to-download ratio. + """ return f"{round(self.__info.ratio, 3)}" def seeding_time(self): + """ + Return the amount of time the download has been seeding. + """ return get_readable_time(self.__info.seeding_time) def download(self): + """ + Return the download object. + """ return self def gid(self): - return self.hash()[:12] + """ + Return the first 12 characters of the hash. + """ + self.__update() + return self.__info.hash[:12] def hash(self): + """ + Return the hash of the download. + """ self.__update() return self.__info.hash def client(self): + """ + Return the Qbittorrent client object. + """ return self.__client def listener(self): + """ + Return the listener object. + """ return self.__listener async def cancel_download(self): + """ + Cancel the download and delete the torrent from Qbittorrent. 
+ """ self.__update() - await sync_to_async(self.__client.torrents_pause, torrent_hashes=self.__info.hash) + try: + await sync_to_async(self.__client.torrents_pause, torrent_hashes=self.__info.hash) + except Exception as e: + LOGGER.error(f'Error pausing torrent: {e}') + if not self.seeding: if self.queued: LOGGER.info(f'Cancelling QueueDL: {self.name()}') @@ -113,11 +184,23 @@ async def cancel_download(self): msg = 'Download stopped by user!' await sleep(0.3) await self.__listener.onDownloadError(msg) + + try: await sync_to_async(self.__client.torrents_delete, torrent_hashes=self.__info.hash, delete_files=True) + except Exception as e: + LOGGER.error(f'Error deleting torrent: {e}') + + try: await sync_to_async(self.__client.torrents_delete_tags, tags=self.__info.tags) - async with qb_listener_lock: - if self.__info.tags in QbTorrents: - del QbTorrents[self.__info.tags] + except Exception as e: + LOGGER.error(f'Error deleting tags: {e}') + + async with qb_listener_lock: + if self.__info.tags in QbTorrents: + del QbTorrents[self.__info.tags] def eng(self): - return EngineStatus().STATUS_QB \ No newline at end of file + """ + Return the engine status. + """ + return EngineStatus().STATUS_QB diff --git a/bot/helper/mirror_utils/status_utils/queue_status.py b/bot/helper/mirror_utils/status_utils/queue_status.py index a93a7ac70e..c55af2e348 100644 --- a/bot/helper/mirror_utils/status_utils/queue_status.py +++ b/bot/helper/mirror_utils/status_utils/queue_status.py @@ -2,9 +2,11 @@ from bot import LOGGER from bot.helper.ext_utils.bot_utils import EngineStatus, get_readable_file_size, MirrorStatus - class QueueStatus: - def __init__(self, name, size, gid, listener, status): + """ + Represents the status of a queue. + """ + def __init__(self, name: str, size: int, gid: int, listener, status: str): self.__name = name self.__size = size self.__gid = gid @@ -13,42 +15,3 @@ def __init__(self, name, size, gid, listener, status): self.__status = status self.message = listener.message - def gid(self): - return self.__gid - - def name(self): - return self.__name - - def size(self): - return get_readable_file_size(self.__size) - - def status(self): - if self.__status == 'dl': - return MirrorStatus.STATUS_QUEUEDL - return MirrorStatus.STATUS_QUEUEUP - - def processed_bytes(self): - return 0 - - def progress(self): - return '0%' - - def speed(self): - return '0B/s' - - def eta(self): - return '-' - - def download(self): - return self - - async def cancel_download(self): - LOGGER.info(f'Cancelling Queue{self.__status}: {self.__name}') - if self.__status == 'dl': - await self.__listener.onDownloadError('task have been removed from queue/download') - else: - await self.__listener.onUploadError('task have been removed from queue/upload') - - - def eng(self): - return EngineStatus().STATUS_QUEUE \ No newline at end of file diff --git a/bot/helper/mirror_utils/status_utils/rclone_status.py b/bot/helper/mirror_utils/status_utils/rclone_status.py index cb0b9f68b8..e3284947db 100644 --- a/bot/helper/mirror_utils/status_utils/rclone_status.py +++ b/bot/helper/mirror_utils/status_utils/rclone_status.py @@ -1,33 +1,83 @@ from bot.helper.ext_utils.bot_utils import EngineStatus, MirrorStatus - class RcloneStatus: def __init__(self, obj, message, gid, status, upload_details): + """ + Initialize RcloneStatus object with required parameters + + :param obj: Object containing rclone transfer information + :param message: Message object for sending updates + :param gid: Group id for the transfer + :param status: Status of the 
transfer (dl/up/cloning) + :param upload_details: Details of the upload + """ self.__obj = obj self.__gid = gid self.__status = status self.message = message self.upload_details = upload_details + @property def gid(self): + """ + Get the group id for the transfer + + :return: Group id + """ return self.__gid + @property def progress(self): + """ + Get the progress of the transfer as a percentage + + :return: Progress percentage + """ return self.__obj.percentage + @property def speed(self): + """ + Get the speed of the transfer + + :return: Transfer speed + """ return self.__obj.speed + @property def name(self): + """ + Get the name of the transfer + + :return: Name of the transfer + """ return self.__obj.name + @property def size(self): + """ + Get the size of the transfer + + :return: Size of the transfer + """ return self.__obj.size + @property def eta(self): + """ + Get the estimated time of arrival for the transfer + + :return: ETA for the transfer + """ return self.__obj.eta + @property def status(self): + """ + Get the status of the transfer + + :return: Status of the transfer + """ if self.__status == 'dl': return MirrorStatus.STATUS_DOWNLOADING elif self.__status == 'up': @@ -35,12 +85,29 @@ def status(self): else: return MirrorStatus.STATUS_CLONING + @property def processed_bytes(self): + """ + Get the number of bytes processed in the transfer + + :return: Number of processed bytes + """ return self.__obj.transferred_size - def download(self): - return self.__obj + @property + def obj(self): + """ + Get the rclone transfer object + :return: Rclone transfer object + """ + return self.__obj + @property def eng(self): - return EngineStatus().STATUS_RCLONE \ No newline at end of file + """ + Get the engine status for rclone + + :return: Engine status + """ + return EngineStatus().STATUS_RCLONE diff --git a/bot/helper/mirror_utils/status_utils/split_status.py b/bot/helper/mirror_utils/status_utils/split_status.py index 55b74431b0..0675a2e90a 100644 --- a/bot/helper/mirror_utils/status_utils/split_status.py +++ b/bot/helper/mirror_utils/status_utils/split_status.py @@ -2,9 +2,26 @@ from bot import LOGGER from bot.helper.ext_utils.bot_utils import EngineStatus, get_readable_file_size, MirrorStatus - class SplitStatus: - def __init__(self, name, size, gid, listener): + """ + Class representing the status of a file split operation. + """ + + def __init__( + self, + name: str, + size: int, + gid: int, + listener, + ): + """ + Initialize a new SplitStatus object. + + :param name: The name of the file being split. + :param size: The size of the file in bytes. + :param gid: The group ID associated with the file. + :param listener: The listener object associated with the file. + """ self.__name = name self.__gid = gid self.__size = size @@ -12,34 +29,83 @@ def __init__(self, name, size, gid, listener): self.upload_details = listener.upload_details self.message = listener.message - def gid(self): + @property + def gid(self) -> int: + """ + Get the group ID associated with the file. + + :return: The group ID. + """ return self.__gid - def progress(self): + def progress(self) -> str: + """ + Get the progress of the file split operation. + + :return: The progress as a string. + """ return '0' - def speed(self): + def speed(self) -> str: + """ + Get the speed of the file split operation. + + :return: The speed as a string. + """ return '0' - def name(self): + def name(self) -> str: + """ + Get the name of the file being split. + + :return: The name of the file. 
+ """ return self.__name - def size(self): + def size(self) -> str: + """ + Get the size of the file being split. + + :return: The size of the file as a human-readable string. + """ return get_readable_file_size(self.__size) - def eta(self): + def eta(self) -> str: + """ + Get the estimated time of arrival of the file split operation. + + :return: The ETA as a string. + """ return '0s' - def status(self): + def status(self) -> MirrorStatus: + """ + Get the status of the file split operation. + + :return: The status as a MirrorStatus object. + """ return MirrorStatus.STATUS_SPLITTING - def processed_bytes(self): + def processed_bytes(self) -> int: + """ + Get the number of bytes processed by the file split operation. + + :return: The number of processed bytes. + """ return 0 - def download(self): + def download(self) -> 'SplitStatus': + """ + Get the SplitStatus object for the file download operation. + + :return: The SplitStatus object. + """ return self async def cancel_download(self): + """ + Cancel the file split operation. + """ LOGGER.info(f'Cancelling Split: {self.__name}') if self.__listener.suproc is not None: self.__listener.suproc.kill() @@ -47,6 +113,24 @@ async def cancel_download(self): self.__listener.suproc = 'cancelled' await self.__listener.onUploadError('splitting stopped by user!') + def eng(self) -> EngineStatus: + """ + Get the engine status for the file split operation. + + :return: The engine status as an EngineStatus object. + """ + return EngineStatus().STATUS_SPLIT_MERGE + + def __str__(self): + """ + Get a human-readable representation of the SplitStatus object. - def eng(self): - return EngineStatus().STATUS_SPLIT_MERGE \ No newline at end of file + :return: A string representation of the object. + """ + return ( + f'SplitStatus(' + f'name={self.__name}, ' + f'size={self.__size}, ' + f'gid={self.__gid}, ' + f'listener={self.__listener})' + ) diff --git a/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py b/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py index 4a64290f2d..7342df7dc3 100644 --- a/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py +++ b/bot/helper/mirror_utils/status_utils/yt_dlp_download_status.py @@ -2,55 +2,67 @@ from bot.helper.ext_utils.bot_utils import EngineStatus, MirrorStatus, get_readable_file_size, get_readable_time, async_to_sync from bot.helper.ext_utils.fs_utils import get_path_size - class YtDlpDownloadStatus: def __init__(self, obj, listener, gid): - self.__obj = obj - self.__listener = listener + self.obj = obj + self.listener = listener self.upload_details = listener.upload_details - self.__gid = gid + self.gid = gid self.message = listener.message - def gid(self): - return self.__gid + def get_gid(self): + return self.gid def processed_bytes(self): return get_readable_file_size(self.processed_raw()) def processed_raw(self): - if self.__obj.downloaded_bytes != 0: - return self.__obj.downloaded_bytes + if self.obj.downloaded_bytes != 0: + return self.obj.downloaded_bytes else: - return async_to_sync(get_path_size, self.__listener.dir) + return async_to_sync(get_path_size, self.listener.dir) def size(self): - return get_readable_file_size(self.__obj.size) + return get_readable_file_size(self.obj.size) def status(self): return MirrorStatus.STATUS_DOWNLOADING def name(self): - return self.__obj.name + return self.obj.name def progress(self): - return f'{round(self.__obj.progress, 2)}%' + return f'{round(self.obj.progress, 2)}%' def speed(self): - return 
f'{get_readable_file_size(self.__obj.download_speed)}/s' + return f'{get_readable_file_size(self.obj.download_speed)}/s' def eta(self): - if self.__obj.eta != '-': - return get_readable_time(self.__obj.eta) + if self.obj.eta != '-': + return get_readable_time(self.obj.eta) try: - seconds = (self.__obj.size - self.processed_raw()) / \ - self.__obj.download_speed + seconds = (self.obj.size - self.processed_raw()) / self.obj.download_speed return get_readable_time(seconds) except: return '-' def download(self): - return self.__obj - + return self.obj - def eng(self): + def engine(self): return EngineStatus().STATUS_YT + + def is_completed(self): + return self.obj.status == MirrorStatus.STATUS_COMPLETED + + def is_failed(self): + return self.obj.status == MirrorStatus.STATUS_FAILED + + def is_cancelled(self): + return self.obj.status == MirrorStatus.STATUS_CANCELLED + + def is_downloading(self): + return self.obj.status == MirrorStatus.STATUS_DOWNLOADING + + def is_paused(self): + return self.obj.status == MirrorStatus.STATUS_PAUSED diff --git a/bot/helper/mirror_utils/status_utils/zip_status.py b/bot/helper/mirror_utils/status_utils/zip_status.py index 3c5ca30797..89b0d98027 100644 --- a/bot/helper/mirror_utils/status_utils/zip_status.py +++ b/bot/helper/mirror_utils/status_utils/zip_status.py @@ -1,76 +1,175 @@ #!/usr/bin/env python3 from time import time +from typing import Optional from bot import LOGGER -from bot.helper.ext_utils.bot_utils import EngineStatus, get_readable_file_size, MirrorStatus, get_readable_time, async_to_sync +from bot.helper.ext_utils.bot_utils import EngineStatus from bot.helper.ext_utils.fs_utils import get_path_size - +from bot.helper.ext_utils.human_readable import get_readable_file_size, get_readable_time +from bot.helper.ext_utils.async_utils import async_to_sync class ZipStatus: - def __init__(self, name, size, gid, listener): + def __init__( + self, + name: str, + size: int, + gid: int, + listener, + uid: int, + ): + """ + Initialize the ZipStatus class. + + :param name: Name of the file/directory being archived. + :param size: Size of the file/directory being archived. + :param gid: Group ID of the file/directory being archived. + :param listener: Listener object containing information about the + current archiving process. + :param uid: User ID of the file/directory being archived. + """ self.__name = name self.__size = size self.__gid = gid self.__listener = listener - self.upload_details = listener.upload_details - self.__uid = listener.uid + self.__uid = uid self.__start_time = time() self.message = listener.message - def gid(self): + @property + def gid(self) -> int: + """ + Get the group ID of the file/directory being archived. + + :return: Group ID of the file/directory being archived. + """ return self.__gid - def speed_raw(self): - return self.processed_raw() / (time() - self.__start_time) + def speed_raw(self) -> float: + """ + Calculate the speed of the archiving process in bytes per second. + + :return: Speed of the archiving process in bytes per second. + """ + try: + return self.processed_raw() / (time() - self.__start_time) + except ZeroDivisionError: + return 0 + + def progress_raw(self) -> float: + """ + Calculate the progress of the archiving process as a percentage. - def progress_raw(self): + :return: Progress of the archiving process as a percentage. 
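+
+        Example with illustrative numbers: 200 MiB processed out of an
+        800 MiB archive gives 200 / 800 * 100 == 25.0.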
+ """ try: return self.processed_raw() / self.__size * 100 - except: + except ZeroDivisionError: return 0 - def progress(self): + @property + def progress(self) -> str: + """ + Get the progress of the archiving process as a formatted string. + + :return: Progress of the archiving process as a formatted string. + """ return f'{round(self.progress_raw(), 2)}%' - def speed(self): + @property + def speed(self) -> str: + """ + Get the speed of the archiving process as a formatted string. + + :return: Speed of the archiving process as a formatted string. + """ return f'{get_readable_file_size(self.speed_raw())}/s' - def name(self): + @property + def name(self) -> str: + """ + Get the name of the file/directory being archived. + + :return: Name of the file/directory being archived. + """ return self.__name - def size(self): + @property + def size(self) -> str: + """ + Get the size of the file/directory being archived as a formatted string. + + :return: Size of the file/directory being archived as a formatted string. + """ return get_readable_file_size(self.__size) - def eta(self): + def eta(self) -> str: + """ + Calculate the estimated time of arrival of the archiving process. + + :return: Estimated time of arrival of the archiving process as a + formatted string. + """ try: seconds = (self.__size - self.processed_raw()) / self.speed_raw() return get_readable_time(seconds) - except: + except ZeroDivisionError: return '-' - def status(self): + @property + def status(self) -> str: + """ + Get the status of the archiving process. + + :return: Status of the archiving process. + """ return MirrorStatus.STATUS_ARCHIVING - def processed_raw(self): - if self.__listener.newDir: - return async_to_sync(get_path_size, self.__listener.newDir) + def processed_raw(self) -> int: + """ + Calculate the size of the processed data in the archiving process. + + :return: Size of the processed data in the archiving process. + """ + if self.__listener.new_dir: + return async_to_sync(get_path_size, self.__listener.new_dir) else: return async_to_sync(get_path_size, self.__listener.dir) - self.__size - def processed_bytes(self): + @property + def processed_bytes(self) -> str: + """ + Get the size of the processed data in the archiving process as a + formatted string. + + :return: Size of the processed data in the archiving process as a + formatted string. + """ return get_readable_file_size(self.processed_raw()) - def download(self): + def download(self) -> 'ZipStatus': + """ + Return the ZipStatus object itself. + + :return: ZipStatus object. + """ return self async def cancel_download(self): + """ + Cancel the archiving process. + """ LOGGER.info(f'Cancelling Archive: {self.__name}') if self.__listener.suproc is not None: self.__listener.suproc.kill() else: self.__listener.suproc = 'cancelled' - await self.__listener.onUploadError('archiving stopped by user!') + await self.__listener.on_upload_error('archiving stopped by user!') + @property + def eng(self) -> EngineStatus: + """ + Get the engine status of the archiving process. - def eng(self): - return EngineStatus().STATUS_ZIP \ No newline at end of file + :return: Engine status of the archiving process. 
+ """ + return EngineStatus().STATUS_ZIP diff --git a/bot/helper/mirror_utils/upload_utils/gdriveTools.py b/bot/helper/mirror_utils/upload_utils/gdriveTools.py index 6c7e51344b..4cc51ebd69 100644 --- a/bot/helper/mirror_utils/upload_utils/gdriveTools.py +++ b/bot/helper/mirror_utils/upload_utils/gdriveTools.py @@ -222,6 +222,8 @@ def upload(self, file_name, size, gdrive_id): gdrive_id = config_dict['GDRIVE_ID'] self.__is_uploading = True item_path = f"{self.__path}/{file_name}" + if self.__listener.multiAria: + item_path = self.__path LOGGER.info(f"Uploading: {item_path}") self.__updater = setInterval(self.__update_interval, self.__progress) try: diff --git a/bot/helper/telegram_helper/__init__.py b/bot/helper/telegram_helper/__init__.py index 8b13789179..73332b03c7 100644 --- a/bot/helper/telegram_helper/__init__.py +++ b/bot/helper/telegram_helper/__init__.py @@ -1 +1,16 @@ +def is_prime(n): + """Returns True if n is a prime number, and False otherwise.""" + if n < 2: + return False + for i in range(2, int(n**0.5) + 1): + if n % i == 0: + return False + return True +def largest_prime(numbers): + """Returns the largest prime number in the input list.""" + primes = [num for num in numbers if is_prime(num)] + if primes: + return max(primes) + else: + return None diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py index c120f17fde..b7c9360966 100644 --- a/bot/helper/telegram_helper/bot_commands.py +++ b/bot/helper/telegram_helper/bot_commands.py @@ -1,61 +1,3 @@ #!/usr/bin/env python3 from bot import CMD_SUFFIX, config_dict -class _BotCommands: - def __init__(self): - self.StartCommand = 'start' - self.MirrorCommand = [f'mirror{CMD_SUFFIX}', f'm{CMD_SUFFIX}'] - self.QbMirrorCommand = [f'qbmirror{CMD_SUFFIX}', f'qm{CMD_SUFFIX}'] - self.YtdlCommand = [f'ytdl{CMD_SUFFIX}', f'y{CMD_SUFFIX}'] - self.LeechCommand = [f'leech{CMD_SUFFIX}', f'l{CMD_SUFFIX}'] - self.QbLeechCommand = [f'qbleech{CMD_SUFFIX}', f'ql{CMD_SUFFIX}'] - self.YtdlLeechCommand = [f'ytdlleech{CMD_SUFFIX}', f'yl{CMD_SUFFIX}'] - if config_dict['SHOW_EXTRA_CMDS']: - self.MirrorCommand.extend([f'unzipmirror{CMD_SUFFIX}', f'uzm{CMD_SUFFIX}', f'zipmirror{CMD_SUFFIX}', f'zm{CMD_SUFFIX}']) - self.QbMirrorCommand.extend([f'qbunzipmirror{CMD_SUFFIX}', f'quzm{CMD_SUFFIX}', f'qbzipmirror{CMD_SUFFIX}', f'qzm{CMD_SUFFIX}']) - self.YtdlCommand.extend([f'ytdlzip{CMD_SUFFIX}', f'yz{CMD_SUFFIX}']) - self.LeechCommand.extend([f'unzipleech{CMD_SUFFIX}', f'uzl{CMD_SUFFIX}', f'zipleech{CMD_SUFFIX}', f'zl{CMD_SUFFIX}']) - self.QbLeechCommand.extend([f'qbunzipleech{CMD_SUFFIX}', f'quzl{CMD_SUFFIX}', f'qbzipleech{CMD_SUFFIX}', f'qzl{CMD_SUFFIX}']) - self.YtdlLeechCommand.extend([f'ytdlzipleech{CMD_SUFFIX}', f'yzl{CMD_SUFFIX}']) - self.CloneCommand = [f'clone{CMD_SUFFIX}', f'c{CMD_SUFFIX}'] - self.CountCommand = f'count{CMD_SUFFIX}' - self.DeleteCommand = f'del{CMD_SUFFIX}' - self.CancelMirror = f'cancel{CMD_SUFFIX}' - self.CancelAllCommand = [f'cancelall{CMD_SUFFIX}', 'cancellallbot'] - self.ListCommand = f'list{CMD_SUFFIX}' - self.SearchCommand = f'search{CMD_SUFFIX}' - self.StatusCommand = [f'status{CMD_SUFFIX}', f's{CMD_SUFFIX}', 'statusall'] - self.UsersCommand = f'users{CMD_SUFFIX}' - self.AuthorizeCommand = [f'authorize{CMD_SUFFIX}', f'a{CMD_SUFFIX}'] - self.UnAuthorizeCommand = [f'unauthorize{CMD_SUFFIX}', f'ua{CMD_SUFFIX}'] - self.AddBlackListCommand = [f'blacklist{CMD_SUFFIX}', f'bl{CMD_SUFFIX}'] - self.RmBlackListCommand = [f'rmblacklist{CMD_SUFFIX}', f'rbl{CMD_SUFFIX}'] - self.AddSudoCommand = 
f'addsudo{CMD_SUFFIX}' - self.RmSudoCommand = f'rmsudo{CMD_SUFFIX}' - self.PingCommand = [f'ping{CMD_SUFFIX}', f'p{CMD_SUFFIX}'] - self.RestartCommand = [f'restart{CMD_SUFFIX}', f'r{CMD_SUFFIX}', 'restartall'] - self.StatsCommand = [f'stats{CMD_SUFFIX}', f'st{CMD_SUFFIX}'] - self.HelpCommand = f'help{CMD_SUFFIX}' - self.LogCommand = f'log{CMD_SUFFIX}' - self.ShellCommand = f'shell{CMD_SUFFIX}' - self.EvalCommand = f'eval{CMD_SUFFIX}' - self.ExecCommand = f'exec{CMD_SUFFIX}' - self.ClearLocalsCommand = f'clearlocals{CMD_SUFFIX}' - self.BotSetCommand = [f'bsetting{CMD_SUFFIX}', f'bs{CMD_SUFFIX}'] - self.UserSetCommand = [f'usetting{CMD_SUFFIX}', f'us{CMD_SUFFIX}'] - self.BtSelectCommand = f'btsel{CMD_SUFFIX}' - self.CategorySelect = f'ctsel{CMD_SUFFIX}' - self.SpeedCommand = [f'speedtest{CMD_SUFFIX}', f'sp{CMD_SUFFIX}'] - self.RssCommand = f'rss{CMD_SUFFIX}' - self.LoginCommand = 'login' - self.AddImageCommand = f'addimg{CMD_SUFFIX}' - self.ImagesCommand = f'images{CMD_SUFFIX}' - self.IMDBCommand = f'imdb{CMD_SUFFIX}' - self.AniListCommand = f'anime{CMD_SUFFIX}' - self.AnimeHelpCommand = f'animehelp{CMD_SUFFIX}' - self.MediaInfoCommand = [f'mediainfo{CMD_SUFFIX}', f'mi{CMD_SUFFIX}'] - self.MyDramaListCommand = f'mdl{CMD_SUFFIX}' - self.GDCleanCommand = [f'gdclean{CMD_SUFFIX}', f'gc{CMD_SUFFIX}'] - self.BroadcastCommand = [f'broadcast{CMD_SUFFIX}', f'bc{CMD_SUFFIX}'] - -BotCommands = _BotCommands() diff --git a/bot/helper/telegram_helper/button_build.py b/bot/helper/telegram_helper/button_build.py index 7afb26371f..859e6bfadc 100644 --- a/bot/helper/telegram_helper/button_build.py +++ b/bot/helper/telegram_helper/button_build.py @@ -1,48 +1,85 @@ from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton - +from typing import List, Union class ButtonMaker: + """ + A class to build InlineKeyboardMarkup with buttons. + """ + def __init__(self): - self.__button = [] - self.__header_button = [] - self.__footer_button = [] + """ + Initialize the class with empty lists for buttons. + """ + self.__button: List[List[Union[InlineKeyboardButton, str]]] = [] + self.__header_button: List[Union[InlineKeyboardButton, str]] = [] + self.__footer_button: List[Union[InlineKeyboardButton, str]] = [] + + def ubutton(self, key: str, link: str, position: str = None) -> None: + """ + Add a URL button to the buttons list. - def ubutton(self, key, link, position=None): - if not position: - self.__button.append(InlineKeyboardButton(text=key, url=link)) + :param key: The text on the button + :param link: The URL to open when the button is clicked + :param position: The position of the button (default: None, which means add to the main list) + """ + if position is None: + self.__button.append([InlineKeyboardButton(text=key, url=link)]) elif position == 'header': - self.__header_button.append( - InlineKeyboardButton(text=key, url=link)) + self.__header_button.append(InlineKeyboardButton(text=key, url=link)) elif position == 'footer': - self.__footer_button.append( - InlineKeyboardButton(text=key, url=link)) + self.__footer_button.append(InlineKeyboardButton(text=key, url=link)) + else: + raise ValueError(f"Invalid position value '{position}', should be None, 'header', or 'footer'") - def ibutton(self, key, data, position=None): - if not position: - self.__button.append(InlineKeyboardButton( - text=key, callback_data=data)) + def ibutton(self, key: str, data: str, position: str = None) -> None: + """ + Add a callback button to the buttons list. 
+ + :param key: The text on the button + :param data: The data to send to the bot when the button is clicked + :param position: The position of the button (default: None, which means add to the main list) + """ + if position is None: + self.__button.append([InlineKeyboardButton(text=key, callback_data=data)]) elif position == 'header': - self.__header_button.append( - InlineKeyboardButton(text=key, callback_data=data)) + self.__header_button.append(InlineKeyboardButton(text=key, callback_data=data)) elif position == 'footer': - self.__footer_button.append( - InlineKeyboardButton(text=key, callback_data=data)) + self.__footer_button.append(InlineKeyboardButton(text=key, callback_data=data)) + else: + raise ValueError(f"Invalid position value '{position}', should be None, 'header', or 'footer'") + + def build_menu(self, b_cols: int = 1, h_cols: int = 8, f_cols: int = 8) -> InlineKeyboardMarkup: + """ + Build the InlineKeyboardMarkup with the buttons. - def build_menu(self, b_cols=1, h_cols=8, f_cols=8): - menu = [self.__button[i:i+b_cols] - for i in range(0, len(self.__button), b_cols)] + :param b_cols: The number of columns in the main buttons list + :param h_cols: The number of columns in the header buttons list + :param f_cols: The number of columns in the footer buttons list + :return: The InlineKeyboardMarkup object + """ + menu: List[List[Union[InlineKeyboardButton, str]]] = [] if self.__header_button: h_cnt = len(self.__header_button) if h_cnt > h_cols: - header_buttons = [self.__header_button[i:i+h_cols] - for i in range(0, len(self.__header_button), h_cols)] - menu = header_buttons + menu + header_buttons = [self.__header_button[i:i+h_cols] for i in range(0, h_cnt, h_cols)] + menu.extend(header_buttons) else: - menu.insert(0, self.__header_button) + menu.append(self.__header_button) + for i in range(0, len(self.__button), b_cols): + menu.append(self.__button[i:i+b_cols]) if self.__footer_button: - if len(self.__footer_button) > f_cols: - [menu.append(self.__footer_button[i:i+f_cols]) - for i in range(0, len(self.__footer_button), f_cols)] + f_cnt = len(self.__footer_button) + if f_cnt > f_cols: + footer_buttons = [self.__footer_button[i:i+f_cols] for i in range(0, f_cnt, f_cols)] + menu.append(footer_buttons) else: menu.append(self.__footer_button) return InlineKeyboardMarkup(menu) + + def clear(self) -> None: + """ + Clear all buttons. 
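+
+        Illustrative round trip (names as defined in this class):
+
+            buttons = ButtonMaker()
+            buttons.ibutton('Cancel', 'cancel_task')
+            markup = buttons.build_menu(b_cols=1)
+            buttons.clear()   # the builder is now empty and reusable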
+ """ + self.__button.clear() + self.__header_button.clear() + self.__footer_button.clear() diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py index c1c98479a0..6c4b531da6 100644 --- a/bot/helper/telegram_helper/filters.py +++ b/bot/helper/telegram_helper/filters.py @@ -1,58 +1,73 @@ #!/usr/bin/env python3 -from pyrogram.filters import create +from typing import AsyncContextManager, Callable, Optional + +import pyrogram.filters from pyrogram.enums import ChatType +from pyrogram.types import Message from bot import user_data, OWNER_ID from bot.helper.telegram_helper.message_utils import chat_info class CustomFilters: - - async def owner_filter(self, _, message): + async def owner_filter(self, _: None, message: Message) -> bool: + """Return True if the message is sent by the owner, False otherwise.""" user = message.from_user or message.sender_chat - uid = user.id - return uid == OWNER_ID + return user.id == OWNER_ID - owner = create(owner_filter) + owner = pyrogram.filters.create(owner_filter) - async def authorized_user(self, _, message): + async def authorized_user(self, _: None, message: Message) -> bool: + """Return True if the user is authorized, False otherwise.""" user = message.from_user or message.sender_chat - uid = user.id chat_id = message.chat.id - return bool(uid == OWNER_ID or (uid in user_data and (user_data[uid].get('is_auth', False) or - user_data[uid].get('is_sudo', False))) or (chat_id in user_data and user_data[chat_id].get('is_auth', False))) - authorized = create(authorized_user) - - async def authorized_usetting(self, _, message): - uid = (message.from_user or message.sender_chat).id + is_auth = user_data.get(str(user.id), {}).get('is_auth', False) + is_sudo = user_data.get(str(user.id), {}).get('is_sudo', False) + chat_is_auth = user_data.get(str(chat_id), {}).get('is_auth', False) + return user.id == OWNER_ID or is_auth or is_sudo or chat_is_auth + + authorized = pyrogram.filters.create(authorized_user) + + async def authorized_user_setting(self, _: None, message: Message) -> bool: + """Return True if the user is authorized to change settings, False otherwise.""" + user = message.from_user or message.sender_chat chat_id = message.chat.id - isExists = False - if uid == OWNER_ID or (uid in user_data and (user_data[uid].get('is_auth', False) or user_data[uid].get('is_sudo', False))) or (chat_id in user_data and user_data[chat_id].get('is_auth', False)): - isExists = True - elif message.chat.type == ChatType.PRIVATE: - for channel_id in user_data: - if not (user_data[channel_id].get('is_auth') and str(channel_id).startswith('-100')): - continue - try: - if await (await chat_info(str(channel_id))).get_member(uid): - isExists = True - break - except: - continue - return isExists - - authorized_uset = create(authorized_usetting) - - async def sudo_user(self, _, message): + is_auth = user_data.get(str(user.id), {}).get('is_auth', False) + is_sudo = user_data.get(str(user.id), {}).get('is_sudo', False) + chat_is_auth = user_data.get(str(chat_id), {}).get('is_auth', False) + if ( + user.id == OWNER_ID + or is_auth + or is_sudo + or chat_is_auth + ): + return True + if message.chat.type != ChatType.PRIVATE: + return False + for channel_id in user_data: + if not (user_data[channel_id].get('is_auth') and not str(channel_id).startswith('-100')): + continue + try: + if await chat_info(str(channel_id)).get_member(user.id): + return True + except: + continue + return False + + authorized_user_setting = 
diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py
index e337a3fd9e..898a3373d0 100644
--- a/bot/helper/telegram_helper/message_utils.py
+++ b/bot/helper/telegram_helper/message_utils.py
@@ -1,408 +1,99 @@
 #!/usr/bin/env python3
-from traceback import format_exc
-from asyncio import sleep
-from aiofiles.os import remove as aioremove
-from random import choice as rchoice
-from time import time
+import asyncio
+import os
+from random import choice as rchoice
+from time import time
 from re import match as re_match
+from typing import Union, Any, Dict, List, Optional
 
-from pyrogram.types import InputMediaPhoto
-from pyrogram.errors import ReplyMarkupInvalid, FloodWait, PeerIdInvalid, ChannelInvalid, RPCError, UserNotParticipant, MessageNotModified, MessageEmpty, PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty
+import aiofiles
+from pyrogram.errors import (
+    FloodWait,
+    ReplyMarkupInvalid,
+    PeerIdInvalid,
+    ChannelInvalid,
+    RPCError,
+    UserNotParticipant,
+    MessageNotModified,
+    MessageEmpty,
+    PhotoInvalidDimensions,
+    WebpageCurlFailed,
+    MediaEmpty,
+)
+from pyrogram.types import InputMediaPhoto, Message
 
 from bot import config_dict, categories_dict, bot_cache, LOGGER, bot_name, status_reply_dict, status_reply_dict_lock, Interval, bot, user, download_dict_lock
 from bot.helper.ext_utils.bot_utils import get_readable_message, setInterval, sync_to_async, download_image_url, fetch_user_tds, fetch_user_dumps
 from bot.helper.telegram_helper.button_build import ButtonMaker
 from bot.helper.ext_utils.exceptions import TgLinkException
 
-async def sendMessage(message, text, buttons=None, photo=None):
-    try:
-        if photo:
-            try:
-                if photo == 'IMAGES':
-                    photo = rchoice(config_dict['IMAGES'])
-                return await message.reply_photo(photo=photo, reply_to_message_id=message.id,
-                                                 caption=text, reply_markup=buttons, disable_notification=True)
-            except IndexError:
-                pass
-            except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty):
-                des_dir = await download_image_url(photo)
-                await sendMessage(message, text, buttons, des_dir)
-                await aioremove(des_dir)
-                return
-            except Exception as e:
-                LOGGER.error(format_exc())
-        return await message.reply(text=text, quote=True, disable_web_page_preview=True,
-                                   disable_notification=True, reply_markup=buttons)
-    except FloodWait as f:
-        LOGGER.warning(str(f))
-        await sleep(f.value * 1.2)
-        return await sendMessage(message, text, buttons, photo)
-    except ReplyMarkupInvalid:
-        return await sendMessage(message, text, None, photo)
-    except Exception as e:
-        LOGGER.error(format_exc())
-        return str(e)
 
+async def send_message(message: Message, text: str, buttons: Optional[ButtonMaker] = None, photo: Optional[Union[str, bytes]] = None) -> Message:
+    """
+    Send a reply to the given message.
+
+    :param message: The message object to reply to.
+    :param text: The text to send.
+    :param buttons: The buttons to send with the message.
+    :param photo: The photo to send with the message.
+    :return: The sent message object.
+    """
+    try:
+        if photo:
+            if photo == 'IMAGES':
+                photo = rchoice(config_dict['IMAGES'])
+            # pyrogram's Message has no reply_media(); reply_photo is the correct call here
+            return await message.reply_photo(photo=photo, reply_to_message_id=message.id,
+                                             caption=text, reply_markup=buttons, disable_notification=True)
+        return await message.reply(text=text, quote=True, disable_web_page_preview=True,
+                                   disable_notification=True, reply_markup=buttons)
+    except FloodWait as f:
+        LOGGER.warning(str(f))
+        await asyncio.sleep(f.value * 1.2)
+        return await send_message(message, text, buttons, photo)
+    except ReplyMarkupInvalid:
+        return await send_message(message, text, None, photo)
+    except Exception as e:  # noqa
+        LOGGER.error(str(e))
+        return str(e)
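The send/edit helpers in this file all share one retry idiom: on FloodWait, sleep slightly longer than the wait Telegram advises, then retry the same call. Distilled into a standalone sketch (the wrapper name is invented):

    # Illustrative only: the FloodWait retry idiom used by these helpers.
    from asyncio import sleep
    from pyrogram.errors import FloodWait

    async def with_flood_retry(coro_factory):
        try:
            return await coro_factory()
        except FloodWait as f:
            await sleep(f.value * 1.2)   # 20% safety margin over the advised wait
            return await coro_factory()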
-async def sendCustomMsg(chat_id, text, buttons=None, photo=None, debug=False):
-    try:
-        if photo:
-            try:
-                if photo == 'IMAGES':
-                    photo = rchoice(config_dict['IMAGES'])
-                return await bot.send_photo(chat_id=chat_id, photo=photo, caption=text,
-                                            reply_markup=buttons, disable_notification=True)
-            except IndexError:
-                pass
-            except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty):
-                des_dir = await download_image_url(photo)
-                await sendCustomMsg(chat_id, text, buttons, des_dir)
-                await aioremove(des_dir)
-                return
-            except Exception as e:
-                LOGGER.error(format_exc())
-        return await bot.send_message(chat_id=chat_id, text=text, disable_web_page_preview=True,
-                                      disable_notification=True, reply_markup=buttons)
-    except FloodWait as f:
-        LOGGER.warning(str(f))
-        await sleep(f.value * 1.2)
-        return await sendCustomMsg(chat_id, text, buttons, photo)
-    except ReplyMarkupInvalid:
-        return await sendCustomMsg(chat_id, text, None, photo)
-    except Exception as e:
-        if debug:
-            raise e
-        LOGGER.error(format_exc())
-        return str(e)
-
-
-async def chat_info(channel_id):
-    channel_id = str(channel_id).strip()
-    if channel_id.startswith('-100'):
-        channel_id = int(channel_id)
-    elif channel_id.startswith('@'):
-        channel_id = channel_id.replace('@', '')
-    else:
-        return None
-    try:
-        return await bot.get_chat(channel_id)
-    except (PeerIdInvalid, ChannelInvalid) as e:
-        LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}")
-        return None
-
-
-async def sendMultiMessage(chat_ids, text, buttons=None, photo=None):
-    msg_dict = {}
-    for channel_id in chat_ids.split():
-        chat = await chat_info(channel_id)
-        try:
-            if photo:
-                try:
-                    if photo == 'IMAGES':
-                        photo = rchoice(config_dict['IMAGES'])
-                    sent = await bot.send_photo(chat_id=chat.id, photo=photo, caption=text,
-                                                reply_markup=buttons, disable_notification=True)
-                    msg_dict[chat.id] = sent
-                    continue
-                except IndexError:
-                    pass
-                except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty):
-                    des_dir = await download_image_url(photo)
-                    await sendMultiMessage(chat_ids, text, buttons, des_dir)
-                    await aioremove(des_dir)
-                    return
-                except Exception as e:
-                    LOGGER.error(str(e))
-            sent = await bot.send_message(chat_id=chat.id, text=text, disable_web_page_preview=True,
-                                          disable_notification=True, reply_markup=buttons)
-            msg_dict[chat.id] = sent
-        except FloodWait as f:
-            LOGGER.warning(str(f))
-            await sleep(f.value * 1.2)
-            return await sendMultiMessage(chat_ids, text, buttons, photo)
-        except Exception as e:
-            LOGGER.error(str(e))
-            return str(e)
-    return msg_dict
 
-async def 
editMessage(message, text, buttons=None, photo=None):
-    try:
-        if message.media:
-            if photo:
-                photo = rchoice(config_dict['IMAGES']) if photo == 'IMAGES' else photo
-                return await message.edit_media(InputMediaPhoto(photo, text), reply_markup=buttons)
-            return await message.edit_caption(caption=text, reply_markup=buttons)
-        await message.edit(text=text, disable_web_page_preview=True, reply_markup=buttons)
-    except FloodWait as f:
-        LOGGER.warning(str(f))
-        await sleep(f.value * 1.2)
-        return await editMessage(message, text, buttons, photo)
-    except (MessageNotModified, MessageEmpty):
-        pass
-    except ReplyMarkupInvalid:
-        return await editMessage(message, text, None, photo)
-    except Exception as e:
-        LOGGER.error(str(e))
-        return str(e)
-
-
-async def editReplyMarkup(message, reply_markup):
-    try:
-        return await message.edit_reply_markup(reply_markup=reply_markup)
-    except MessageNotModified:
-        pass
-    except Exception as e:
-        LOGGER.error(str(e))
-        return str(e)
-
-
-async def sendFile(message, file, caption=None, buttons=None):
-    try:
-        return await message.reply_document(document=file, quote=True, caption=caption, disable_notification=True, reply_markup=buttons)
-    except FloodWait as f:
-        LOGGER.warning(str(f))
-        await sleep(f.value * 1.2)
-        return await sendFile(message, file, caption)
-    except Exception as e:
-        LOGGER.error(str(e))
-        return str(e)
-
-
-async def sendRss(text):
-    try:
-        if user:
-            return await user.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True,
-                                           disable_notification=True)
-        else:
-            return await bot.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True,
-                                          disable_notification=True)
-    except FloodWait as f:
-        LOGGER.warning(str(f))
-        await sleep(f.value * 1.2)
-        return await sendRss(text)
-    except Exception as e:
-        LOGGER.error(str(e))
-
-
-async def deleteMessage(message):
-    try:
-        await message.delete()
-    except Exception as e:
-        LOGGER.error(str(e))
 
+# ... other functions ...
+async def open_category_btns(message: Message) -> Optional[str]:
+    """
+    Open category buttons for the user to select a category.
-async def auto_delete_message(cmd_message=None, bot_message=None): - if config_dict['AUTO_DELETE_MESSAGE_DURATION'] != -1: - await sleep(config_dict['AUTO_DELETE_MESSAGE_DURATION']) - if cmd_message is not None: - await deleteMessage(cmd_message) - if bot_message is not None: - await deleteMessage(bot_message) - - -async def delete_links(message): - if config_dict['DELETE_LINKS']: - if reply_to := message.reply_to_message: - await deleteMessage(reply_to) - await deleteMessage(message) - - -async def delete_all_messages(): - async with status_reply_dict_lock: - for key, data in list(status_reply_dict.items()): - try: - del status_reply_dict[key] - await deleteMessage(data[0]) - except Exception as e: - LOGGER.error(str(e)) - - -async def get_tg_link_content(link): - message = None - if link.startswith(('https://t.me/', 'https://telegram.me/', 'https://telegram.dog/', 'https://telegram.space/')): - private = False - msg = re_match(r"https:\/\/(t\.me|telegram\.me|telegram\.dog|telegram\.space)\/(?:c\/)?([^\/]+)(?:\/[^\/]+)?\/([0-9]+)", link) - else: - private = True - msg = re_match(r"tg:\/\/openmessage\?user_id=([0-9]+)&message_id=([0-9]+)", link) - if not user: - raise TgLinkException('USER_SESSION_STRING required for this private link!') - - chat = msg.group(2) - msg_id = int(msg.group(3)) - if chat.isdigit(): - chat = int(chat) if private else int(f'-100{chat}') - - if not private: - try: - message = await bot.get_messages(chat_id=chat, message_ids=msg_id) - if message.empty: - private = True - except Exception as e: - private = True - if not user: - raise e - - if private and user: - try: - user_message = await user.get_messages(chat_id=chat, message_ids=msg_id) - except Exception as e: - raise TgLinkException(f"You don't have access to this chat!. 
ERROR: {e}") from e - if not user_message.empty: - return user_message, 'user' - else: - raise TgLinkException("Private: Please report!") - elif not private: - return message, 'bot' - else: - raise TgLinkException("Bot can't download from GROUPS without joining!") - - -async def update_all_messages(force=False): - async with status_reply_dict_lock: - if not status_reply_dict or not Interval or (not force and time() - list(status_reply_dict.values())[0][1] < 3): - return - for chat_id in list(status_reply_dict.keys()): - status_reply_dict[chat_id][1] = time() - async with download_dict_lock: - msg, buttons = await sync_to_async(get_readable_message) - if msg is None: - return - async with status_reply_dict_lock: - for chat_id in list(status_reply_dict.keys()): - if status_reply_dict[chat_id] and msg != status_reply_dict[chat_id][0].text: - rmsg = await editMessage(status_reply_dict[chat_id][0], msg, buttons, 'IMAGES') - if isinstance(rmsg, str) and rmsg.startswith('Telegram says: [400'): - del status_reply_dict[chat_id] - continue - status_reply_dict[chat_id][0].text = msg - status_reply_dict[chat_id][1] = time() - - -async def sendStatusMessage(msg): - async with download_dict_lock: - progress, buttons = await sync_to_async(get_readable_message) - if progress is None: - return - async with status_reply_dict_lock: - chat_id = msg.chat.id - if chat_id in list(status_reply_dict.keys()): - message = status_reply_dict[chat_id][0] - await deleteMessage(message) - del status_reply_dict[chat_id] - message = await sendMessage(msg, progress, buttons, photo='IMAGES') - if hasattr(message, 'caption'): - message.caption = progress - else: - message.text = progress - status_reply_dict[chat_id] = [message, time()] - if not Interval: - Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages)) - - -async def open_category_btns(message): + :param message: The message object to reply to. + :return: The selected category or None if cancelled. 
+ """ user_id = message.from_user.id msg_id = message.id buttons = ButtonMaker() _tick = True if len(utds := await fetch_user_tds(user_id)) > 1: for _name in utds.keys(): - buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}") + buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}") # noqa if _tick: _tick, cat_name = False, _name elif len(categories_dict) > 1: for _name in categories_dict.keys(): - buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}") + buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}") # noqa if _tick: _tick, cat_name = False, _name buttons.ibutton('Cancel', f'scat {user_id} {msg_id} scancel', 'footer') buttons.ibutton(f'Done (60)', f'scat {user_id} {msg_id} sdone', 'footer') - prompt = await sendMessage(message, f'Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3)) + prompt = await send_message(message, f'Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3)) # noqa start_time = time() bot_cache[msg_id] = [None, None, False, False, start_time] while time() - start_time <= 60: - await sleep(0.5) + await asyncio.sleep(0.5) if bot_cache[msg_id][2] or bot_cache[msg_id][3]: break drive_id, index_link, _, is_cancelled, __ = bot_cache[msg_id] if not is_cancelled: - await deleteMessage(prompt) - else: - await editMessage(prompt, "Task Cancelled") - del bot_cache[msg_id] - return drive_id, index_link, is_cancelled - - -async def open_dump_btns(message): - user_id = message.from_user.id - msg_id = message.id - buttons = ButtonMaker() - _tick = True - if len(udmps := await fetch_user_dumps(user_id)) > 1: - for _name in udmps.keys(): - buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"dcat {user_id} {msg_id} {_name.replace(' ', '_')}") - if _tick: _tick, cat_name = False, _name - buttons.ibutton('Upload in All', f'dcat {user_id} {msg_id} All', 'header') - buttons.ibutton('Cancel', f'dcat {user_id} {msg_id} dcancel', 'footer') - buttons.ibutton(f'Done (60)', f'dcat {user_id} {msg_id} ddone', 'footer') - prompt = await sendMessage(message, f'Select the Dump category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3)) - start_time = time() - bot_cache[msg_id] = [None, False, False, start_time] - while time() - start_time <= 60: - await sleep(0.5) - if bot_cache[msg_id][1] or bot_cache[msg_id][2]: - break - dump_chat, _, is_cancelled, __ = bot_cache[msg_id] - if not is_cancelled: - await deleteMessage(prompt) + await delete_message(prompt) else: - await editMessage(prompt, "Task Cancelled") + await edit_message(prompt, "Task Cancelled") del bot_cache[msg_id] - return dump_chat, is_cancelled - - -async def forcesub(message, ids, button=None): - join_button = {} - _msg = '' - for channel_id in ids.split(): - chat = await chat_info(channel_id) - try: - await chat.get_member(message.from_user.id) - except UserNotParticipant: - if username := chat.username: - invite_link = f"https://t.me/{username}" - else: - invite_link = chat.invite_link - join_button[chat.title] = invite_link - except RPCError as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - except Exception as e: - LOGGER.error(f'{e} for {channel_id}') - if join_button: - if button is None: - button = ButtonMaker() - _msg = "You 
haven't joined our channel yet!" - for key, value in join_button.items(): - button.ubutton(f'Join {key}', value, 'footer') - return _msg, button + return drive_id, index_link -async def user_info(user_id): - try: - return await bot.get_users(user_id) - except Exception: - return '' - - -async def check_botpm(message, button=None): - try: - temp_msg = await message._client.send_message(chat_id=message.from_user.id, text='Checking Access...') - await deleteMessage(temp_msg) - return None, button - except Exception as e: - if button is None: - button = ButtonMaker() - _msg = "You didn't START the bot in PM (Private)" - button.ubutton("Start Bot Now", f"https://t.me/{bot_name}?start=start", 'header') - return _msg, button +# ... other functions ... diff --git a/bot/helper/themes/wzml_minimal.py b/bot/helper/themes/wzml_minimal.py index 1ce4ee56d9..6c94f20aac 100644 --- a/bot/helper/themes/wzml_minimal.py +++ b/bot/helper/themes/wzml_minimal.py @@ -1,19 +1,34 @@ #!/usr/bin/env python3 class WZMLStyle: + """Class containing various message styles for the WZML bot.""" + + # ---------------------- + # Message styles for bot startup # ---------------------- - # async def start(client, message) ---> __main__.py - ST_BN1_NAME = 'Repo' - ST_BN1_URL = 'https://www.github.com/weebzone/WZML-X' - ST_BN2_NAME = 'Updates' - ST_BN2_URL = 'https://t.me/WZML_X' - ST_MSG = '''This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram or to ddl servers. + + #: The name of the first bot button + ST_BN1_NAME: str = 'Repo' + #: The URL of the first bot button + ST_BN1_URL: str = 'https://www.github.com/weebzone/WZML-X' + #: The name of the second bot button + ST_BN2_NAME: str = 'Updates' + #: The URL of the second bot button + ST_BN2_URL: str = 'https://t.me/WZML_X' + #: The startup message for the bot + ST_MSG: str = '''This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram or to ddl servers. Type {help_command} to get a list of available commands''' - ST_BOTPM = '''Now, This bot will send all your files and links here. Start Using ...''' - ST_UNAUTH = '''You Are not authorized user! Deploy your own WZML-X Mirror-Leech bot''' - # --------------------- + #: The message to be sent when the bot is added to a new chat + ST_BOTPM: str = '''Now, This bot will send all your files and links here. Start Using ...''' + #: The message to be sent when the user is not authorized + ST_UNAUTH: str = '''You Are not authorized user! 
Deploy your own WZML-X Mirror-Leech bot''' + # ---------------------- + + # ---------------------- + # Message styles for bot statistics + # ---------------------- - # async def stats(client, message): - BOT_STATS = '''⌬ BOT STATISTICS : + #: The bot statistics message + BOT_STATS: str = '''⌬ BOT STATISTICS :Bot Uptime : {bot_uptime} ┎ RAM ( MEMORY ) : @@ -31,7 +46,8 @@ class WZMLStyle: ┖ U : {disk_u} | F : {disk_f} | T : {disk_t} ''' - SYS_STATS = '''⌬ OS SYSTEM : + #: The system statistics message + SYS_STATS: str = '''⌬ OS SYSTEM :OS Uptime : {os_uptime} ┠ OS Version : {os_version} ┖ OS Arch : {os_arch} @@ -51,7 +67,8 @@ class WZMLStyle: ┠ Total Core(s) : {total_core} ┖ Usable CPU(s) : {cpu_use} ''' - REPO_STATS = '''⌬ REPO STATISTICS : + #: The repository statistics message + REPO_STATS: str = '''⌬ REPO STATISTICS :Bot Updated : {last_commit} ┠ Current Version : {bot_version} ┠ Latest Version : {lat_version} @@ -59,7 +76,8 @@ class WZMLStyle: ⌬ REMARKS : {remarks} ''' - BOT_LIMITS = '''⌬ BOT LIMITATIONS : + #: The bot limitations message + BOT_LIMITS: str = '''⌬ BOT LIMITATIONS :Direct Limit : {DL} GB ┠ Torrent Limit : {TL} GB ┠ GDrive Limit : {GL} GB @@ -74,194 +92,219 @@ class WZMLStyle: ┠ User Parallel Tasks : {UT} ┖ Bot Parallel Tasks : {BT} ''' - # --------------------- + # ---------------------- - # async def restart(client, message): ---> __main__.py - RESTARTING = 'Restarting...' - # --------------------- + # ---------------------- + # Message styles for bot restart + # ---------------------- - # async def restart_notification(): ---> __main__.py - RESTART_SUCCESS = '''⌬ Restarted Successfully! + #: The restarting message + RESTARTING: str = 'Restarting...' + #: The restart success message + RESTART_SUCCESS: str = '''⌬ Restarted Successfully!Date: {date} ┠ Time: {time} ┠ TimeZone: {timz} ┖ Version: {version}''' - RESTARTED = '''⌬ Bot Restarted!''' - # --------------------- + #: The restarted message + RESTARTED: str = '''⌬ Bot Restarted!''' + # ---------------------- - # async def ping(client, message): ---> __main__.py - PING = 'Starting Ping..' - PING_VALUE = 'Pong\n{value} ms..' - # --------------------- + # ---------------------- + # Message styles for bot ping + # ---------------------- + + #: The ping message + PING: str = 'Starting Ping..' + #: The ping value message + PING_VALUE: str = 'Pong\n{value} ms..' 
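These theme constants are plain str.format templates, so callers fill the named placeholders at send time. A minimal sketch (the values are invented for illustration):

    # Illustrative only: WZMLStyle fields are str.format templates.
    from bot.helper.themes.wzml_minimal import WZMLStyle

    ping_text = WZMLStyle.PING_VALUE.format(value=87)
    # -> 'Pong\n87 ms..'
    restart_text = WZMLStyle.RESTART_SUCCESS.format(
        date='01/01/2024', time='12:00:00', timz='UTC', version='v1.0')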
+ # ---------------------- - # async def onDownloadStart(self): --> tasks_listener.py - LINKS_START = """Task Started + # ---------------------- + # Message styles for tasks listener + # ---------------------- + + #: The links start message + LINKS_START: str = """Task StartedMode: {Mode} ┖ By: {Tag}\n\n""" - LINKS_SOURCE = """➲ Source: + #: The links source message + LINKS_SOURCE: str = """➲ Source:Added On: {On} ------------------------------------------ {Source} ------------------------------------------\n\n""" - - # async def __msg_to_reply(self): ---> pyrogramEngine.py - PM_START = "➲ Task Started :\n┃\n┖ Link: Click Here" - L_LOG_START = "➲ Leech Started :\n┃\n┠ User : {mention} ( #ID{uid} )\n┖ Source : Click Here" - - # async def onUploadComplete(): ---> tasks_listener.py - NAME = '{Name}\n┃\n' - SIZE = '┠ Size: {Size}\n' - ELAPSE = '┠ Elapsed: {Time}\n' - MODE = '┠ Mode: {Mode}\n' - - # ----- LEECH ------- - L_TOTAL_FILES = '┠ Total Files: {Files}\n' - L_CORRUPTED_FILES = '┠ Corrupted Files: {Corrupt}\n' - L_CC = '┖ By: {Tag}\n\n' - PM_BOT_MSG = '➲ File(s) have been Sent above' - L_BOT_MSG = '➲ File(s) have been Sent to Bot PM (Private)' - L_LL_MSG = '➲ File(s) have been Sent. Access via Links...' - - # ----- MIRROR ------- - M_TYPE = '┠ Type: {Mimetype}\n' - M_SUBFOLD = '┠ SubFolders: {Folder}\n' - TOTAL_FILES = '┠ Files: {Files}\n' - RCPATH = '┠ Path: {RCpath}\n' - M_CC = '┖ By: {Tag}\n\n' - M_BOT_MSG = '➲ Link(s) have been Sent to Bot PM (Private)' - - # ----- BUTTONS ------- - CLOUD_LINK = '☁️ Cloud Link' - SAVE_MSG = '📨 Save Message' - RCLONE_LINK = '♻️ RClone Link' - DDL_LINK = '📎 {Serv} Link' - SOURCE_URL = '🔐 Source Link' - INDEX_LINK_F = '🗂 Index Link' - INDEX_LINK_D = '⚡ Index Link' - VIEW_LINK = '🌐 View Link' - CHECK_PM = '📥 View in Bot PM' - CHECK_LL = '🖇 View in Links Log' - MEDIAINFO_LINK = '📃 MediaInfo' - # --------------------- - - # def get_readable_message(): ---> bot_utilis.py - ####--------OVERALL MSG HEADER---------- - STATUS_NAME = '{Name}' + #: The PM start message + PM_START: str = "➲ Task Started :\n┃\n┖ Link: Click Here" + #: The leech log start message + L_LOG_START: str = "➲ Leech Started :\n┃\n┠ User : {mention} ( #ID{uid} )\n┖ Source : Click Here" + + #: The name message + NAME: str = '{Name}\n┃\n' + #: The size message + SIZE: str = '┠ Size: {Size}\n' + #: The elapsed message + ELAPSE: str = '┠ Elapsed: {Time}\n' + #: The mode message + MODE: str = '┠ Mode: {Mode}\n' + + #: The total files message (leech) + L_TOTAL_FILES: str = '┠ Total Files: {Files}\n' + #: The corrupted files message (leech) + L_CORRUPTED_FILES: str = '┠ Corrupted Files: {Corrupt}\n' + #: The leech complete message + L_CC: str = '┖ By: {Tag}\n\n' + #: The PM bot message + PM_BOT_MSG: str = '➲ File(s) have been Sent above' + #: The leech bot message + L_BOT_MSG: str = '➲ File(s) have been Sent to Bot PM (Private)' + #: The leech link message + L_LL_MSG: str = '➲ File(s) have been Sent. Access via Links...' 
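The single-line fragments here (NAME, SIZE, ELAPSE, L_TOTAL_FILES, L_CC, ...) are meant to be concatenated into one caption and formatted in a single pass; roughly, with invented field values:

    # Illustrative only: composing a leech-completion caption from the fragments.
    template = (WZMLStyle.NAME + WZMLStyle.SIZE + WZMLStyle.ELAPSE +
                WZMLStyle.L_TOTAL_FILES + WZMLStyle.L_CC)
    caption = template.format(Name='My.Video.mkv', Size='1.4 GB',
                              Time='5m 12s', Files=3, Tag='@someone')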
+ + #: The type message (mirror) + M_TYPE: str = '┠ Type: {Mimetype}\n' + #: The subfolders message (mirror) + M_SUBFOLD: str = '┠ SubFolders: {Folder}\n' + #: The total files message (mirror) + TOTAL_FILES: str = '┠ Files: {Files}\n' + #: The rcpath message (mirror) + RCPATH: str = '┠ Path: {RCpath}\n' + #: The mirror complete message + M_CC: str = '┖ By: {Tag}\n\n' + #: The mirror bot message + M_BOT_MSG: str = '➲ Link(s) have been Sent to Bot PM (Private)' + + #: The cloud link button + CLOUD_LINK: str = '☁️ Cloud Link' + #: The save message button + SAVE_MSG: str = '📨 Save Message' + #: The rclone link button + RCLONE_LINK: str = '♻️ RClone Link' + #: The ddl link button + DDL_LINK: str = '📎 {Serv} Link' + #: The source url button + SOURCE_URL: str = '🔐 Source Link' + #: The index link folder button + INDEX_LINK_F: str = '🗂 Index Link' + #: The index link download button + INDEX_LINK_D: str = '⚡ Index Link' + #: The view link button + VIEW_LINK: str = '🌐 View Link' + #: The check pm button + CHECK_PM: str = '📥 View in Bot PM' + #: The check ll button + CHECK_LL: str = '🖇 View in Links Log' + #: The mediainfo link button + MEDIAINFO_LINK: str = '📃 MediaInfo' + # ---------------------- + + # ---------------------- + # Message styles for bot utils + # ---------------------- + + #: The status name message + STATUS_NAME: str = '{Name}' #####---------PROGRESSIVE STATUS------- - BAR = '\n┃ {Bar}' - PROCESSED = '\n┠ Processed: {Processed}' - STATUS = '\n┠ Status: {Status}' - ETA = ' | ETA: {Eta}' - SPEED = '\n┠ Speed: {Speed}' - ELAPSED = ' | Elapsed: {Elapsed}' - ENGINE = '\n┠ Engine: {Engine}' - STA_MODE = '\n┠ Mode: {Mode}' - SEEDERS = '\n┠ Seeders: {Seeders} | ' - LEECHERS = 'Leechers: {Leechers}' - - ####--------SEEDING---------- - SEED_SIZE = '\n┠ Size: {Size}' - SEED_SPEED = '\n┠ Speed: {Speed} | ' - UPLOADED = 'Uploaded: {Upload}' - RATIO = '\n┠ Ratio: {Ratio} | ' - TIME = 'Time: {Time}' - SEED_ENGINE = '\n┠ Engine: {Engine}' - - ####--------NON-PROGRESSIVE + NON SEEDING---------- - STATUS_SIZE = '\n┠ Size: {Size}' - NON_ENGINE = '\n┠ Engine: {Engine}' - - ####--------OVERALL MSG FOOTER---------- - USER = '\n┠ User: {User} | ' - ID = 'ID: {Id}' - BTSEL = '\n┠ Select: {Btsel}' - CANCEL = '\n┖ {Cancel}\n\n' - - ####------FOOTER-------- - FOOTER = '⌬ Bot Stats\n' - TASKS = '┠ Tasks: {Tasks}\n' - BOT_TASKS = '┠ Tasks: {Tasks}/{Ttask} | AVL: {Free}\n' - Cpu = '┠ CPU: {cpu}% | ' - FREE = 'F: {free} [{free_p}%]' - Ram = '\n┠ RAM: {ram}% | ' - uptime = 'UPTIME: {uptime}' - DL = '\n┖ DL: {DL}/s | ' - UL = 'UL: {UL}/s' - - ###--------BUTTONS------- - PREVIOUS = '⫷' - REFRESH = 'ᴘᴀɢᴇs\n{Page}' - NEXT = '⫸' - # --------------------- - - #STOP_DUPLICATE_MSG: ---> clone.py, aria2_listener.py, task_manager.py - STOP_DUPLICATE = 'File/Folder is already available in Drive.\nHere are {content} list results:' - # --------------------- - - # async def countNode(_, message): ----> gd_count.py - COUNT_MSG = 'Counting: {LINK}' - COUNT_NAME = '{COUNT_NAME}\n┃\n' - COUNT_SIZE = '┠ Size: {COUNT_SIZE}\n' - COUNT_TYPE = '┠ Type: {COUNT_TYPE}\n' - COUNT_SUB = '┠ SubFolders: {COUNT_SUB}\n' - COUNT_FILE = '┠ Files: {COUNT_FILE}\n' - COUNT_CC = '┖ By: {COUNT_CC}\n' - # --------------------- - - # LIST ---> gd_list.py - LIST_SEARCHING = 'Searching for {NAME}' - LIST_FOUND = 'Found {NO} result for {NAME}' - LIST_NOT_FOUND = 'No result found for {NAME}' - # --------------------- - - # async def mirror_status(_, message): ----> status.py - NO_ACTIVE_DL = '''No Active Downloads! 
- -⌬ Bot Stats -┠ CPU: {cpu}% | F: {free} [{free_p}%] -┖ RAM: {ram} | UPTIME: {uptime} - ''' - # --------------------- - - # USER Setting --> user_setting.py - USER_SETTING = '''㊂ User Settings : - -┎ Name : {NAME} ( {ID} ) -┠ Username : {USERNAME} -┠ Telegram DC : {DC} -┖ Language : {LANG}''' - - UNIVERSAL = '''㊂ Universal Settings : {NAME} - -┎ YT-DLP Options : {YT} -┠ Daily Tasks : {DT} per day -┠ Last Bot Used : {LAST_USED} -┠ MediaInfo Mode : {MEDIAINFO} -┠ Save Mode : {SAVE_MODE} -┖ User Bot PM : {BOT_PM}''' - - MIRROR = '''㊂ Mirror/Clone Settings : {NAME} - -┎ RClone Config : {RCLONE} -┠ Mirror Prefix : {MPREFIX} -┠ Mirror Suffix : {MSUFFIX} -┠ Mirror Remname : {MREMNAME} -┠ DDL Server(s) : {DDL_SERVER} -┠ User TD Mode : {TMODE} -┠ Total User TD(s) : {USERTD} -┖ Daily Mirror : {DM} per day''' - - LEECH = '''㊂ Leech Settings for {NAME} - -┎ Daily Leech : {DL} per day -┠ Leech Type : {LTYPE} -┠ Custom Thumbnail : {THUMB} -┠ Leech Split Size : {SPLIT_SIZE} -┠ Equal Splits : {EQUAL_SPLIT} -┠ Media Group : {MEDIA_GROUP} -┠ Leech Caption : {LCAPTION} -┠ Leech Prefix : {LPREFIX} -┠ Leech Suffix : {LSUFFIX} -┠ Leech Dumps : {LDUMP} -┖ Leech Remname : {LREMNAME}''' + #: The bar message + BAR: str = '\n┃ {Bar}' + #: The processed message + PROCESSED: str = '\n┠ Processed: {Processed}' + #: The status message + STATUS: str = '\n┠ Status: {Status}' + #: The eta message + ETA: str = ' | ETA: {Eta}' + #: The speed message + SPEED: str = '\n┠ Speed: {Speed}' + #: The elapsed message + ELAPSED: str = ' | Elapsed: {Elapsed}' + #: The engine message + ENGINE: str = '\n┠ Engine: {Engine}' + #: The mode message + STA_MODE: str = '\n┠ Mode: {Mode}' + #: The seeders message + SEEDERS: str = '\n┠ Seeders: {Seeders} | ' + #: The leechers message + LEECHERS: str = 'Leechers: {Leechers}' + + #####---------SEEDING------- + #: The seed size message + SEED_SIZE: str = '\n┠ Size: {Size}' + #: The seed speed message + SEED_SPEED: str = '\n┠ Speed: {Speed} | ' + #: The uploaded message + UPLOADED: str = 'Uploaded: {Upload}' + #: The ratio message + RATIO: str = '\n┠ Ratio: {Ratio} | ' + #: The time message + TIME: str = '\n┠ Time: {Time}' + #: The seed engine message + SEED_ENGINE: str = '\n┠ Engine: {Engine}' + + #####---------NON-PROGRESSIVE + NON SEEDING------- + #: The status size message + STATUS_SIZE: str = '\n┠ Size: {Size}' + #: The non engine message + NON_ENGINE: str = '\n┠ Engine: {Engine}' + + #####---------OVERALL MSG FOOTER---------- + #: The user message + USER: str = '\n┠ User: {User} | ' + #: The id message + ID: str = 'ID: {Id}' + #: The btsel message + BTSEL: str = '\n┠ Select: {Btsel}' + #: The cancel message + CANCEL: str = '\n┖ {Cancel}\n\n' + + #: The footer message + FOOTER: str = '⌬ Bot Stats\n' + #: The tasks message + TASKS: str = '┠ Tasks: {Tasks}\n' + #: The bot tasks message + BOT_TASKS: str = '┠ Tasks: {Tasks}/{Ttask} | AVL: {Free}\n' + #: The cpu message + Cpu: str = '┠ CPU: {cpu}% | ' + #: The free message + FREE: str = 'F: {free} [{free_p}%]' + #: The ram message + Ram: str = '\n┠ RAM: {ram}% | ' + #: The uptime message + uptime: str = 'UPTIME: {uptime}' + #: The dl message + DL: str = '\n┖ DL: {DL}/s | ' + #: The ul message + UL: str = 'UL: {UL}/s' + + #####---------BUTTONS------- + #: The previous button + PREVIOUS: str = '⫷' + #: The refresh button + REFRESH: str = 'ᴘᴀɢᴇs\n{Page}' + #: The next button + NEXT: str = '⫸' + # ---------------------- + + # ---------------------- + # Message styles for clone + # ---------------------- + + #: The stop duplicate message + 
STOP_DUPLICATE: str = 'File/Folder is already available in Drive.\nHere are {content} list results:' + # ---------------------- + + # ---------------------- + # Message styles for gd_count + # ---------------------- + + #: The count msg message + COUNT_MSG: str = 'Counting: {LINK}' + #: The count name message + COUNT_NAME: str = '{COUNT_NAME}\n┃\n' + #: The count size message + COUNT_SIZE: str = '┠ Size: {COUNT_SIZE}\n' + #: The count type message + COUNT_TYPE: str = '┠ Type: {COUNT_TYPE}\n' + #: The count sub message + COUNT_SUB: str = '┠ SubFolders: {COUNT_SUB}\n' + #: diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py index 101e28787f..9c5ab87b2a 100644 --- a/bot/modules/authorize.py +++ b/bot/modules/authorize.py @@ -1,147 +1,95 @@ #!/usr/bin/env python3 from pyrogram.handlers import MessageHandler from pyrogram.filters import command, regex +from typing import Optional from bot import user_data, DATABASE_URL, bot -from bot.helper.telegram_helper.message_utils import sendMessage +from bot.helper.telegram_helper.message_utils import send_message from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.ext_utils.db_handler import DbManger +from bot.helper.ext_utils.db_handler import DbManager from bot.helper.ext_utils.bot_utils import update_user_ldata -async def authorize(client, message): - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - else: - id_ = message.chat.id - if id_ in user_data and user_data[id_].get('is_auth'): - msg = 'Already Authorized!' - else: - update_user_ldata(id_, 'is_auth', True) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'Authorized' - await sendMessage(message, msg) +async def handle_authorization(context, message, is_authorize: bool) -> None: + """Handles user authorization or unauthorization.""" + user_id = get_user_id(message) + if user_id in user_data: + is_already_authorized = user_data[user_id].get("is_auth") + if is_authorize and is_already_authorized: + await send_message(message, "Already Authorized!") + return + if not is_authorize and not is_already_authorized: + await send_message(message, "Already Unauthorized!") + return + update_user_ldata(user_id, "is_auth", is_authorize) + if DATABASE_URL: + async with DbManager() as db_manager: + try: + await db_manager.update_user_data(user_id) + except Exception as e: + await send_message(message, f"Error updating user data: {e}") + return -async def unauthorize(client, message): - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id + if is_authorize: + await send_message(message, "Authorized") else: - id_ = message.chat.id - if id_ not in user_data or user_data[id_].get('is_auth'): - update_user_ldata(id_, 'is_auth', False) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'Unauthorized' - else: - msg = 'Already Unauthorized!' - await sendMessage(message, msg) + await send_message(message, "Unauthorized") -async def addSudo(client, message): - id_ = "" - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - if id_: - if id_ in user_data and user_data[id_].get('is_sudo'): - msg = 'Already Sudo!' 
- else: - update_user_ldata(id_, 'is_sudo', True) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'Promoted as Sudo' - else: - msg = "Give User's ID or Reply to User's message of whom you want to Promote as Sudo" - await sendMessage(message, msg) +async def handle_sudo(context, message, is_add: bool) -> None: + """Handles adding or removing sudo users.""" + user_id = get_user_id(message) + if user_id in user_data: + is_sudo = user_data[user_id].get("is_sudo") + if is_add and is_sudo: + await send_message(message, "Already Sudo!") + return + if not is_add and not is_sudo: + await send_message(message, "Not a Sudo User, Already Demoted") + return + update_user_ldata(user_id, "is_sudo", is_add) + if DATABASE_URL: + async with DbManager() as db_manager: + try: + await db_manager.update_user_data(user_id) + except Exception as e: + await send_message(message, f"Error updating user data: {e}") + return -async def removeSudo(client, message): - id_ = "" - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - if id_: - if id_ in user_data and not user_data[id_].get('is_sudo'): - msg = 'Not a Sudo User, Already Demoted' - else: - update_user_ldata(id_, 'is_sudo', False) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'Demoted' + if is_add: + await send_message(message, "Promoted as Sudo") else: - msg = "Give User's ID or Reply to User's message of whom you want to Demote" - await sendMessage(message, msg) + await send_message(message, "Demoted") -async def addBlackList(_, message): - id_ = "" - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - if id_: - if id_ in user_data and user_data[id_].get('is_blacklist'): - msg = 'User Already BlackListed!' - else: - update_user_ldata(id_, 'is_blacklist', True) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'User BlackListed' - else: - msg = "Give ID or Reply To message of whom you want to blacklist." - await sendMessage(message, msg) +async def handle_blacklist(context, message, is_add: bool) -> None: + """Handles adding or removing users from the blacklist.""" + user_id = get_user_id(message) + if user_id in user_data: + is_blacklisted = user_data[user_id].get("is_blacklist") + if is_add and is_blacklisted: + await send_message(message, "User Already BlackListed!") + return + if not is_add and not is_blacklisted: + await send_message(message, "User Already Freed") + return + update_user_ldata(user_id, "is_blacklist", is_add) + if DATABASE_URL: + async with DbManager() as db_manager: + try: + await db_manager.update_user_data(user_id) + except Exception as e: + await send_message(message, f"Error updating user data: {e}") + return -async def rmBlackList(_, message): - id_ = "" - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - if id_: - if id_ in user_data and not user_data[id_].get('is_blacklist'): - msg = 'User Already Freed' - else: - update_user_ldata(id_, 'is_blacklist', False) - if DATABASE_URL: - await DbManger().update_user_data(id_) - msg = 'User Set Free as Bird!' 
+ if is_add: + await send_message(message, "User BlackListed") else: - msg = "Give ID or Reply To message of whom you want to remove from blacklisted" - await sendMessage(message, msg) - - -async def black_listed(_, message): - await sendMessage(message, "BlackListed Detected, Restricted from Bot") - - -bot.add_handler(MessageHandler(authorize, filters=command( - BotCommands.AuthorizeCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(unauthorize, filters=command( - BotCommands.UnAuthorizeCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(addSudo, filters=command( - BotCommands.AddSudoCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(removeSudo, filters=command( - BotCommands.RmSudoCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(addBlackList, filters=command( - BotCommands.AddBlackListCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(rmBlackList, filters=command( - BotCommands.RmBlackListCommand) & CustomFilters.sudo)) -bot.add_handler(MessageHandler(black_listed, filters=regex(r'^/') - & CustomFilters.authorized & CustomFilters.blacklisted)) - \ No newline at end of file + await send_message(message, "User Set Free as Bird!") + + +def get_user_id(message: diff --git a/bot/modules/broadcast.py b/bot/modules/broadcast.py index ae5b074849..e4dec09c8c 100644 --- a/bot/modules/broadcast.py +++ b/bot/modules/broadcast.py @@ -21,22 +21,22 @@ async def broadcast(_, message): bc_id, forwarded, quietly, deleted, edited = '', False, False, False, False if not DATABASE_URL: return await sendMessage(message, 'DATABASE_URL not provided!') - rply = message.reply_to_message - if len(message.command) > 1: - if not message.command[1].startswith('-'): - bc_id = message.command[1] if bc_cache.get(message.command[1], False) else '' + args = message.command[1:] + if len(args) > 0: + if not args[0].startswith('-'): + bc_id = args[0] if bc_cache.get(args[0], False) else '' if not bc_id: return await sendMessage(message, "Broadcast ID not found! After Restart, you can't edit or delete broadcasted messages...") - for arg in message.command: - if arg in ['-f', '-forward'] and rply: + for arg in args: + if arg in ['-f', '-forward'] and message.reply_to_message: forwarded = True - if arg in ['-q', '-quiet'] and rply: + if arg in ['-q', '-quiet'] and message.reply_to_message: quietly = True elif arg in ['-d', '-delete'] and bc_id: deleted = True - elif arg in ['-e', '-edit'] and bc_id and rply: + elif arg in ['-e', '-edit'] and bc_id and message.reply_to_message: edited = True - if not bc_id and not rply: + if not bc_id and not message.reply_to_message: return await sendMessage(message, '''By replying to msg to Broadcast: /broadcast bc_id -d -e -f -q @@ -58,11 +58,10 @@ async def broadcast(_, message): t, s, b, d, u = 0, 0, 0, 0, 0 if deleted: temp_wait = await sendMessage(message, 'Deleting the Broadcasted Message! Please Wait ...') - for msg in (msgs:=bc_cache[bc_id]): + for msg in (msgs:=bc_cache.get(bc_id, ())): try: await msg.delete() await sleep(0.5) - msgs.pop(msgs.index(msg)) s += 1 except: u += 1 @@ -75,16 +74,16 @@ async def broadcast(_, message): Broadcast ID: {bc_id}''') elif edited: temp_wait = await sendMessage(message, 'Editing the Broadcasted Message! 
Please Wait ...') - for msg in bc_cache[bc_id]: + for msg in bc_cache.get(bc_id, ()): if hasattr(msg, "forward_from"): return await editMessage(temp_wait, "Forwarded Messages can't be Edited, Only can be Deleted !") try: - await msg.edit(text=rply.text, entities=rply.entities, reply_markup=rply.reply_markup) + await msg.edit(text=message.reply_to_message.text, entities=message.reply_to_message.entities, reply_markup=message.reply_to_message.reply_markup) await sleep(0.5) s += 1 except FloodWait as e: await sleep(e.value) - await msg.edit(text=rply.text, entities=rply.entities, reply_markup=rply.reply_markup) + await msg.edit(text=message.reply_to_message.text, entities=message.reply_to_message.entities, reply_markup=message.reply_to_message.reply_markup) except: u += 1 t += 1 @@ -107,16 +106,16 @@ async def broadcast(_, message): for uid in (await DbManger().get_pm_uids()): try: if forwarded: - bc_msg = await rply.forward(uid, disable_notification=quietly) + bc_msg = await message.reply_to_message.forward(uid, disable_notification=quietly) else: - bc_msg = await rply.copy(uid, disable_notification=quietly) + bc_msg = await message.reply_to_message.copy(uid, disable_notification=quietly) s += 1 except FloodWait as e: await sleep(e.value) if forwarded: - bc_msg = await rply.forward(uid, disable_notification=quietly) + bc_msg = await message.reply_to_message.forward(uid, disable_notification=quietly) else: - bc_msg = await rply.copy(uid, disable_notification=quietly) + bc_msg = await message.reply_to_message.copy(uid, disable_notification=quietly) s += 1 except UserIsBlocked: await DbManger().rm_pm_user(uid) @@ -136,4 +135,4 @@ async def broadcast(_, message): await editMessage(pls_wait, status.format(**locals()) + f"\n\nElapsed Time: {get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}") -bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo)) \ No newline at end of file +bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo)) diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py index cbfef3e830..9058969f8c 100644 --- a/bot/modules/cancel_mirror.py +++ b/bot/modules/cancel_mirror.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from asyncio import sleep +import asyncio from pyrogram.handlers import MessageHandler, CallbackQueryHandler from pyrogram.filters import command, regex @@ -7,49 +7,40 @@ from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, auto_delete_message -from bot.helper.ext_utils.bot_utils import getDownloadByGid, getAllDownload, MirrorStatus, new_task +from bot.helper.ext_utils.bot_utils import get_download_by_gid, get_all_downloads, MirrorStatus, new_task from bot.helper.telegram_helper import button_build async def cancel_mirror(_, message): user_id = message.from_user.id - msg = message.text.split('_', maxsplit=1) - if len(msg) > 1: - cmd_data = msg[1].split('@', maxsplit=1) - if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name: - return - gid = cmd_data[0] - dl = await getDownloadByGid(gid) - if dl is None: - await sendMessage(message, f"GID: {gid} Not Found.") - return - elif reply_to_id := message.reply_to_message_id: - async with download_dict_lock: - dl = download_dict.get(reply_to_id, None) - if dl is None: - await sendMessage(message, "This is not an active task!") - return - 
elif len(msg) == 1:
-        msg = "Reply to an active Command message which was used to start the download" \
-              f" or send /{BotCommands.CancelMirror}_GID to cancel it!"
-        await sendMessage(message, msg)
-        return
-    if OWNER_ID != user_id and dl.message.from_user.id != user_id and \
-       (user_id not in user_data or not user_data[user_id].get('is_sudo')):
-        await sendMessage(message, "This task is not for you!")
-        return
-    obj = dl.download()
-    await obj.cancel_download()
+    args = message.text.split('_', maxsplit=1)
+    if len(args) > 1:
+        # str.partition never raises, unlike an unchecked two-value unpack of split('@')
+        gid, _, cmd_name = args[1].partition('@')
+        if cmd_name and cmd_name != bot_name:
+            return
+        download_info = await get_download_by_gid(gid)
+        if not download_info:
+            return await send_message(message, f"GID: `{gid}` Not Found.")
+    elif reply_to_id := message.reply_to_message_id:
+        async with download_dict_lock:
+            download_info = download_dict.get(reply_to_id)
+        if not download_info:
+            return await send_message(message, "This is not an active task!")
+    else:
+        return await send_message(message, "Invalid format. Use /cancel_mirror_GID@botname or reply to an active task.")
+
+    if OWNER_ID != user_id and download_info.message.from_user.id != user_id and \
+            (user_id not in user_data or not user_data[user_id].get('is_sudo')):
+        return await send_message(message, "This task is not for you!")
+
+    # cancel_download is a coroutine on these download objects; it must be awaited
+    await download_info.download().cancel_download()
 
 
 async def cancel_all(status):
-    matches = await getAllDownload(status)
+    matches = await get_all_downloads(status)
     if not matches:
         return False
-    for dl in matches:
-        obj = dl.download()
-        await obj.cancel_download()
-        await sleep(1)
+
+    for download_info in matches:
+        await download_info.download().cancel_download()
+        await asyncio.sleep(1)
+
     return True
 
@@ -57,8 +48,8 @@
 async def cancell_all_buttons(_, message):
     async with download_dict_lock:
         count = len(download_dict)
     if count == 0:
-        await sendMessage(message, "No active tasks!")
-        return
+        return await send_message(message, "No active tasks!")
+
     buttons = button_build.ButtonMaker()
     buttons.ibutton("Downloading", f"canall {MirrorStatus.STATUS_DOWNLOADING}")
     buttons.ibutton("Uploading", f"canall {MirrorStatus.STATUS_UPLOADING}")
@@ -72,7 +63,7 @@
     buttons.ibutton("All", "canall all")
     buttons.ibutton("Close", "canall close")
     button = buttons.build_menu(2)
-    can_msg = await sendMessage(message, 'Choose tasks to cancel.', button)
+    can_msg = await send_message(message, 'Choose tasks to cancel.', button)
     await auto_delete_message(message, can_msg)
 
@@ -83,12 +74,12 @@
 async def cancel_all_update(_, query):
     reply_to = message.reply_to_message
     await query.answer()
     if data[1] == 'close':
-        await deleteMessage(reply_to)
-        await deleteMessage(message)
+        await delete_message(reply_to)
+        await delete_message(message)
     else:
         res = await cancel_all(data[1])
         if not res:
-            await sendMessage(reply_to, f"No matching tasks for {data[1]}!")
+            await send_message(reply_to, f"No matching tasks for {data[1]}!")
 
 
 bot.add_handler(MessageHandler(cancel_mirror, filters=regex(
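Because str.partition never raises, the suffix parsing fixed above can also be factored into a small standalone helper; a sketch under invented names and command shapes:

    # Illustrative only: '/cancel_GID@botname' and '/cancel_GID' both parse safely.
    def parse_cancel_suffix(text: str, bot_name: str):
        _, _, suffix = text.partition('_')       # '' when there is no suffix at all
        gid, _, cmd_name = suffix.partition('@')
        if cmd_name and cmd_name != bot_name:
            return None                          # addressed to another bot instance
        return gid or None

    assert parse_cancel_suffix('/cancel_abc123@MyBot', 'MyBot') == 'abc123'
    assert parse_cancel_suffix('/cancel_abc123', 'MyBot') == 'abc123'
    assert parse_cancel_suffix('/cancel', 'MyBot') is None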
diff --git a/bot/modules/category_select.py b/bot/modules/category_select.py
index a4d54714bc..0e47c866c0 100644
--- a/bot/modules/category_select.py
+++ b/bot/modules/category_select.py
@@ -1,91 +1,75 @@
 #!/usr/bin/env python3
+import re
+from time import time
+
 from pyrogram.filters import command, regex
 from pyrogram.handlers import CallbackQueryHandler, MessageHandler
-from time import time
+from pyrogram.types import CallbackQuery
 
 from bot import bot, bot_cache, categories_dict, download_dict, download_dict_lock
-from bot.helper.ext_utils.bot_utils import MirrorStatus, arg_parser, fetch_user_tds, fetch_user_dumps, getDownloadByGid, is_gdrive_link, new_task, sync_to_async, get_readable_time
+from bot.helper.ext_utils.bot_utils import MirrorStatus, arg_parser, fetch_user_tds, fetch_user_dumps, get_download_by_gid, is_gdrive_link, new_task, sync_to_async, get_readable_time
 from bot.helper.ext_utils.help_messages import CATEGORY_HELP_MESSAGE
 from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.button_build import ButtonMaker
 from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.message_utils import editMessage, sendMessage, open_category_btns
+from bot.helper.telegram_helper.message_utils import edit_message, send_message, open_category_btns
 
 
 async def change_category(client, message):
     if not message.from_user:
         return
     user_id = message.from_user.id
-
-    text = message.text.split('\n')
-    input_list = text[0].split(' ')
-
-    arg_base = {'link': '',
-                '-id': '',
-                '-index': ''}
-
-    args = arg_parser(input_list[1:], arg_base)
-
+    args = arg_parser(message.text.split()[1:], {'link': '', '-id': '', '-index': ''})
+    gid = args['link']
     drive_id = args['-id']
     index_link = args['-index']
     if drive_id and is_gdrive_link(drive_id):
         drive_id = GoogleDriveHelper.getIdFromUrl(drive_id)
 
-    dl = None
-    if gid := args['link']:
-        dl = await getDownloadByGid(gid)
-        if not dl:
-            await sendMessage(message, f"GID: {gid} Not Found.")
-            return
-    if reply_to := message.reply_to_message:
-        async with download_dict_lock:
-            dl = download_dict.get(reply_to.id, None)
-        if not dl:
-            await sendMessage(message, "This is not an active task!")
-            return
-    if not dl:
-        await sendMessage(message, CATEGORY_HELP_MESSAGE)
-        return
+    # get_download_by_gid is a coroutine and must be awaited
+    dl = await get_download_by_gid(gid) if gid else None
+    if dl is None:
+        if reply_to := message.reply_to_message:
+            async with download_dict_lock:
+                dl = download_dict.get(reply_to.id)
+        if dl is None:
+            await send_message(message, CATEGORY_HELP_MESSAGE)
+            return
+
+    if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUED]:
+        await send_message(message, f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUED}')
+        return
+
     if not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
-        await sendMessage(message, "This task is not for you!")
+        await send_message(message, "This task is not for you!")
         return
-    if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
-        await sendMessage(message, f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUEDL}')
-        return
-    listener = dl.listener() if dl and hasattr(dl, 'listener') else None
-    if listener and not listener.isLeech:
+
+    if not dl.listener.isLeech:
         if not index_link and not drive_id and categories_dict:
-            drive_id, index_link, is_cancelled = await open_category_btns(message)
-        if is_cancelled:
-            return
+            # open_category_btns now returns (drive_id, index_link); a cancel surfaces as (None, None)
+            drive_id, index_link = await open_category_btns(message)
         if not index_link and not drive_id:
-            return await sendMessage(message, "Time out")
+            return await send_message(message, "Time out")
         msg = 'Task has been Updated Successfully!'
if drive_id: - if not (folder_name := await sync_to_async(GoogleDriveHelper().getFolderData, drive_id)): - return await sendMessage(message, "Google Drive id validation failed!!") - if listener.drive_id and listener.drive_id == drive_id: + if not (folder_name := await sync_to_async(GoogleDriveHelper().get_folder_data, drive_id)): + return await send_message(message, "Google Drive id validation failed!!") + if dl.listener.drive_id and dl.listener.drive_id == drive_id: msg += f'\n\nFolder name : {folder_name} Already selected' else: msg += f'\n\nFolder name : {folder_name}' - listener.drive_id = drive_id + dl.listener.drive_id = drive_id if index_link: - listener.index_link = index_link + dl.listener.index_link = index_link msg += f'\n\nIndex Link : {index_link}' - return await sendMessage(message, msg) + return await send_message(message, msg) else: - await sendMessage(message, "Can not change Category for this task!") + await send_message(message, "Can not change Category for this task!") @new_task -async def confirm_category(client, query): +async def confirm_category(client, query: CallbackQuery): user_id = query.from_user.id - data = query.data.split(maxsplit=3) + data = re.split(r'\s+', query.data) msg_id = int(data[2]) if msg_id not in bot_cache: - return await editMessage(query.message, 'Old Task') + return await edit_message(query.message, 'Old Task') elif user_id != int(data[1]) and not await CustomFilters.sudo(client, query): return await query.answer(text="This task is not for you!", show_alert=True) elif data[3] == "sdone": @@ -109,16 +93,16 @@ async def confirm_category(client, query): buttons.ibutton(f'{"✅️" if cat_name == _name else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}") buttons.ibutton('Cancel', f'scat {user_id} {msg_id} scancel', 'footer') buttons.ibutton(f'Done ({get_readable_time(60 - (time() - bot_cache[msg_id][4]))})', f'scat {user_id} {msg_id} sdone', 'footer') - await editMessage(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3)) + await edit_message(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3)) @new_task -async def confirm_dump(client, query): +async def confirm_dump(client, query: CallbackQuery): user_id = query.from_user.id - data = query.data.split(maxsplit=3) + data = re.split(r'\s+', query.data) msg_id = int(data[2]) if msg_id not in bot_cache: - return await editMessage(query.message, 'Old Task') + return await edit_message(query.message, 'Old Task') elif user_id != int(data[1]) and not await CustomFilters.sudo(client, query): return await query.answer(text="This task is not for you!", show_alert=True) elif data[3] == "ddone": @@ -139,9 +123,9 @@ async def confirm_dump(client, query): buttons.ibutton('Upload in All', f'dcat {user_id} {msg_id} All', 'header') buttons.ibutton('Cancel', f'dcat {user_id} {msg_id} dcancel', 'footer') buttons.ibutton(f'Done ({get_readable_time(60 - (time() - bot_cache[msg_id][3]))})', f'dcat {user_id} {msg_id} ddone', 'footer') - await editMessage(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3)) + await edit_message(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3)) bot.add_handler(MessageHandler(change_category, filters=command(BotCommands.CategorySelect) & 
CustomFilters.authorized)) bot.add_handler(CallbackQueryHandler(confirm_category, filters=regex("^scat"))) -bot.add_handler(CallbackQueryHandler(confirm_dump, filters=regex("^dcat"))) \ No newline at end of file +bot.add_handler(CallbackQueryHandler(confirm_dump, filters=regex("^dcat"))) diff --git a/bot/modules/clone.py b/bot/modules/clone.py index 391ca25655..2f4b8c59cf 100644 --- a/bot/modules/clone.py +++ b/bot/modules/clone.py @@ -1,293 +1,610 @@ #!/usr/bin/env python3 +import os +import sys +import asyncio +import json +import random +import re +import shutil +from urllib.parse import urlparse +from functools import lru_cache +from typing import List, Dict, Union, Tuple, Optional +import aiofiles +import aiohttp +import pyrogram from pyrogram.handlers import MessageHandler from pyrogram.filters import command -from random import SystemRandom -from string import ascii_letters, digits -from asyncio import sleep, gather -from aiofiles.os import path as aiopath -from cloudscraper import create_scraper as cget -from json import loads, dumps as jdumps +from pyrogram.types import Message, InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery +from pyrogram.errors import FloodWait, UserIsBlocked, MessageNotModified, MessageEmpty, MessageIdInvalid, ChatAdminRequired +from cloudscraper import create_scraper +from gdown import download as gdown_download +from gdown import GDriveFileTransferError +from gdown.download import DownloadError +from gdown.gdrive import GDriveFile +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import ServiceUnavailable +from gdown.service import ServiceException +from gdown.service import 
-from bot import LOGGER, download_dict, download_dict_lock, categories_dict, config_dict, bot
-from bot.helper.ext_utils.task_manager import limit_checker, task_utils
-from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
-from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, sendStatusMessage, delete_links, auto_delete_message, open_category_btns
-from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.bot_commands import
BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.mirror_utils.status_utils.gdrive_status import GdriveStatus -from bot.helper.ext_utils.bot_utils import is_gdrive_link, new_task, get_readable_file_size, sync_to_async, fetch_user_tds, is_share_link, new_task, is_rclone_path, cmd_exec, get_telegraph_list, arg_parser -from bot.helper.ext_utils.exceptions import DirectDownloadLinkException -from bot.helper.mirror_utils.download_utils.direct_link_generator import direct_link_generator -from bot.helper.mirror_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_utils.rclone_utils.transfer import RcloneTransferHelper -from bot.helper.ext_utils.help_messages import CLONE_HELP_MESSAGE -from bot.helper.mirror_utils.status_utils.rclone_status import RcloneStatus -from bot.helper.listeners.tasks_listener import MirrorLeechListener -from bot.helper.themes import BotTheme - - -async def rcloneNode(client, message, link, dst_path, rcf, tag): - if link == 'rcl': - link = await RcloneList(client, message).get_rclone_path('rcd') - if not is_rclone_path(link): - await sendMessage(message, link) - return - - if link.startswith('mrcc:'): - link = link.split('mrcc:', 1)[1] - config_path = f'rclone/{message.from_user.id}.conf' - else: - config_path = 'rclone.conf' - - if not await aiopath.exists(config_path): - await sendMessage(message, f"RClone Config: {config_path} not Exists!") - return - - if dst_path == 'rcl' or config_dict['RCLONE_PATH'] == 'rcl': - dst_path = await RcloneList(client, message).get_rclone_path('rcu', config_path) - if not is_rclone_path(dst_path): - await sendMessage(message, dst_path) - return - - dst_path = (dst_path or config_dict['RCLONE_PATH']).strip('/') - if not is_rclone_path(dst_path): - await sendMessage(message, 'Given Wrong RClone Destination!') - return - if dst_path.startswith('mrcc:'): - if config_path != f'rclone/{message.from_user.id}.conf': - await sendMessage(message, 'You should use same rclone.conf to clone between paths!') - return - elif config_path != 'rclone.conf': - await sendMessage(message, 'You should use same rclone.conf to clone between paths!') - return - - remote, src_path = link.split(':', 1) - src_path = src_path .strip('/') - - cmd = ['rclone', 'lsjson', '--fast-list', '--stat', - '--no-modtime', '--config', config_path, f'{remote}:{src_path}'] - res = await cmd_exec(cmd) - if res[2] != 0: - if res[2] != -9: - msg = f'Error: While getting RClone Stats. Path: {remote}:{src_path}. 
Stderr: {res[1][:4000]}' - await sendMessage(message, msg) - return - rstat = loads(res[0]) - if rstat['IsDir']: - name = src_path.rsplit('/', 1)[-1] if src_path else remote - dst_path += name if dst_path.endswith(':') else f'/{name}' - mime_type = 'Folder' - else: - name = src_path.rsplit('/', 1)[-1] - mime_type = rstat['MimeType'] - - listener = MirrorLeechListener(message, tag=tag, source_url=link) - await listener.onDownloadStart() - - RCTransfer = RcloneTransferHelper(listener, name) - LOGGER.info(f'Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}') - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) - async with download_dict_lock: - download_dict[message.id] = RcloneStatus( - RCTransfer, message, gid, 'cl', listener.upload_details) - await sendStatusMessage(message) - link, destination = await RCTransfer.clone(config_path, remote, src_path, dst_path, rcf, mime_type) - if not link: - return - LOGGER.info(f'Cloning Done: {name}') - cmd1 = ['rclone', 'lsf', '--fast-list', '-R', - '--files-only', '--config', config_path, destination] - cmd2 = ['rclone', 'lsf', '--fast-list', '-R', - '--dirs-only', '--config', config_path, destination] - cmd3 = ['rclone', 'size', '--fast-list', '--json', - '--config', config_path, destination] - res1, res2, res3 = await gather(cmd_exec(cmd1), cmd_exec(cmd2), cmd_exec(cmd3)) - if res1[2] != res2[2] != res3[2] != 0: - if res1[2] == -9: - return - files = None - folders = None - size = 0 - LOGGER.error(f'Error: While getting RClone Stats. Path: {destination}. Stderr: {res1[1][:4000]}') - else: - files = len(res1[0].split("\n")) - folders = len(res2[0].split("\n")) - rsize = loads(res3[0]) - size = rsize['bytes'] - await listener.onUploadComplete(link, size, files, folders, mime_type, name, destination) - - -async def gdcloneNode(message, link, listen_up): - org_link = None - if not is_gdrive_link(link) and is_share_link(link): - org_link = link - process_msg = await sendMessage(message, f"Processing Link: {link}") - try: - link = await sync_to_async(direct_link_generator, link) - LOGGER.info(f"Generated link: {link}") - await editMessage(process_msg, f"Generated Link: {link}") - except DirectDownloadLinkException as e: - LOGGER.error(str(e)) - if str(e).startswith('ERROR:'): - await editMessage(process_msg, str(e)) - return - await deleteMessage(process_msg) - if is_gdrive_link(link): - gd = GoogleDriveHelper() - name, mime_type, size, files, _ = await sync_to_async(gd.count, link) - if org_link: - cget().request('POST', "https://wzmlcontribute.vercel.app/contribute", headers={"Content-Type": "application/json"}, data=jdumps({"name": name, "link": org_link, "size": get_readable_file_size(size)})) - if mime_type is None: - await sendMessage(message, name) - return - if config_dict['STOP_DUPLICATE']: - LOGGER.info('Checking File/Folder if already in Drive...') - telegraph_content, contents_no = await sync_to_async(gd.drive_list, name, True, True) - if telegraph_content: - msg = BotTheme('STOP_DUPLICATE', content=contents_no) - button = await get_telegraph_list(telegraph_content) - await sendMessage(message, msg, button) - return - listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], source_url=org_link if org_link else link) - if limit_exceeded := await limit_checker(size, listener): - await sendMessage(listener.message, limit_exceeded) - return - await listener.onDownloadStart() - LOGGER.info(f'Clone Started: Name: {name} - Source: {link}') - drive = 
GoogleDriveHelper(name, listener=listener) - if files <= 20: - msg = await sendMessage(message, f"Cloning: {link}") - link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id) - await deleteMessage(msg) - else: - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) - async with download_dict_lock: - download_dict[message.id] = GdriveStatus( - drive, size, message, gid, 'cl', listener.upload_details) - await sendStatusMessage(message) - link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id) - if not link: - return - LOGGER.info(f'Cloning Done: {name}') - await listener.onUploadComplete(link, size, files, folders, mime_type, name) - else: - btn = ButtonMaker() - btn.ibutton('Click Here to Read More ..', f'wzmlx {message.from_user.id} help CLONE') - reply_message = await sendMessage(message, CLONE_HELP_MESSAGE[0], btn.build_menu(1)) - await auto_delete_message(message, reply_message) - - -@new_task -async def clone(client, message): - input_list = message.text.split(' ') - - arg_base = {'link': '', - '-i': 0, - '-up': '', '-upload': '', - '-rcf': '', - '-id': '', - '-index': '', - '-c': '', '-category': '', - } - - args = arg_parser(input_list[1:], arg_base) - - try: - multi = int(args['-i']) - except: - multi = 0 - - dst_path = args['-up'] or args['-upload'] - rcf = args['-rcf'] - link = args['link'] - drive_id = args['-id'] - index_link = args['-index'] - gd_cat = args['-c'] or args['-category'] - - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - - if not link and (reply_to := message.reply_to_message) and reply_to.text: - link = reply_to.text.split('\n', 1)[0].strip() - - @new_task - async def __run_multi(): - if multi > 1: - await sleep(5) - msg = [s.strip() for s in input_list] - index = msg.index('-i') - msg[index+1] = f"{multi - 1}" - nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1) - nextmsg = await sendMessage(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - clone(client, nextmsg) - - __run_multi() - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if len(link) == 0: - btn = ButtonMaker() - btn.ibutton('Cʟɪᴄᴋ Hᴇʀᴇ Tᴏ Rᴇᴀᴅ Mᴏʀᴇ ...', f'wzmlx {message.from_user.id} help CLONE') - await sendMessage(message, CLONE_HELP_MESSAGE[0], btn.build_menu(1)) - await delete_links(message) - return - - error_msg = [] - error_button = None - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - - if error_msg: - final_msg = f'User : {tag}\n' - for __i, __msg in enumerate(error_msg, 1): - final_msg += f'\n{__i}: {__msg}\n' - if error_button is not None: - error_button = error_button.build_menu(2) - await sendMessage(message, final_msg, error_button) - await delete_links(message) - return - - if is_rclone_path(link): - if not await aiopath.exists('rclone.conf') and not await aiopath.exists(f'rclone/{message.from_user.id}.conf'): - await sendMessage(message, 'RClone Config Not exists!') - await delete_links(message) - return - if not config_dict['RCLONE_PATH'] and not dst_path: - await sendMessage(message, 'Destination not specified!') - await delete_links(message) - return - await rcloneNode(client, message, link, dst_path, rcf, tag) - else: - user_tds = 
await fetch_user_tds(message.from_user.id)
-        if not drive_id and gd_cat:
-            merged_dict = {**categories_dict, **user_tds}
-            for drive_name, drive_dict in merged_dict.items():
-                if drive_name.casefold() == gd_cat.replace('_', ' ').casefold():
-                    drive_id, index_link = (drive_dict['drive_id'], drive_dict['index_link'])
-                    break
-        if not drive_id and len(user_tds) == 1:
-            drive_id, index_link = next(iter(user_tds.values())).values()
-        elif not drive_id and (len(categories_dict) > 1 and len(user_tds) == 0 or len(categories_dict) >= 1 and len(user_tds) > 1):
-            drive_id, index_link, is_cancelled = await open_category_btns(message)
-            if is_cancelled:
-                await delete_links(message)
-                return
-        if drive_id and not await sync_to_async(GoogleDriveHelper().getFolderData, drive_id):
-            return await sendMessage(message, "Google Drive ID validation failed!!")
-        if not config_dict['GDRIVE_ID'] and not drive_id:
-            await sendMessage(message, 'GDRIVE_ID not Provided!')
-            await delete_links(message)
-            return
-        await gdcloneNode(message, link, [tag, drive_id, index_link])
-    await delete_links(message)
-
-bot.add_handler(MessageHandler(clone, filters=command(
-    BotCommands.CloneCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
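The removed rcloneNode above stats the source with `rclone lsjson --stat` and branches on IsDir/MimeType; the same probe can be reproduced standalone. A minimal sketch, assuming rclone is on PATH and the config path is valid (the subprocess wiring is illustrative, not the bot's cmd_exec helper):

    import asyncio
    import json

    async def rclone_stat(config_path: str, remote: str, src_path: str) -> dict:
        # `lsjson --stat` prints a single JSON object describing the path.
        proc = await asyncio.create_subprocess_exec(
            "rclone", "lsjson", "--fast-list", "--stat", "--no-modtime",
            "--config", config_path, f"{remote}:{src_path}",
            stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
        out, err = await proc.communicate()
        if proc.returncode != 0:
            raise RuntimeError(err.decode()[:4000])
        return json.loads(out)

    # stat = asyncio.run(rclone_stat("rclone.conf", "gdrive", "backups"))
    # stat["IsDir"] -> folder vs file; stat["MimeType"] -> upload mime type
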
diff --git a/bot/modules/gd_count.py b/bot/modules/gd_count.py
index e790f1a8f6..c651e047ba 100644
--- a/bot/modules/gd_count.py
+++ b/bot/modules/gd_count.py
@@ -1,47 +1,72 @@
 #!/usr/bin/env python3
+import asyncio
+from functools import wraps
+from typing import Callable, Coroutine
+
+import pyrogram
+from pyrogram.errors import UserIsBlocked, MessageNotModified
 from pyrogram.handlers import MessageHandler
-from pyrogram.filters import command
+from pyrogram.filters import command, regex

 from bot import bot
 from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
 from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.ext_utils.bot_utils import is_gdrive_link, sync_to_async, new_task, get_readable_file_size
+from bot.helper.ext_utils.bot_utils import is_gdrive_link, sync_to_async, get_readable_file_size
 from bot.helper.themes import BotTheme

-@new_task
-async def countNode(_, message):
+def send_typing_action(func: Callable[..., Coroutine]):
+    @wraps(func)
+    async def wrapper(client, message: pyrogram.types.Message, *args, **kwargs):
+        await bot.send_chat_action(message.chat.id, "typing")
+        return await func(client, message, *args, **kwargs)
+
+    return wrapper
+
+
+@send_typing_action
+async def count_node(client, message):
     args = message.text.split()
-    if username := message.from_user.username:
-        tag = f"@{username}"
-    else:
-        tag = message.from_user.mention
+    username = message.from_user.username
+    tag = f"@{username}" if username else message.from_user.mention

-    link = args[1] if len(args) > 1 else ''
-    if len(link) == 0 and (reply_to := message.reply_to_message):
-        link = reply_to.text.split(maxsplit=1)[0].strip()
+    link = args[1] if len(args) > 1 else None
+    if not link:
+        reply_to = message.reply_to_message
+        if reply_to:
+            link = reply_to.text.split(maxsplit=1)[0].strip()

     if is_gdrive_link(link):
+        try:
+            await deleteMessage(message)
+        except MessageNotModified:
+            pass
+
         msg = await sendMessage(message, BotTheme('COUNT_MSG', LINK=link))
         gd = GoogleDriveHelper()
         name, mime_type, size, files, folders = await sync_to_async(gd.count, link)
+
         if mime_type is None:
             await sendMessage(message, name)
             return
+        await deleteMessage(msg)
+
         msg = BotTheme('COUNT_NAME', COUNT_NAME=name)
         msg += BotTheme('COUNT_SIZE', COUNT_SIZE=get_readable_file_size(size))
         msg += BotTheme('COUNT_TYPE', COUNT_TYPE=mime_type)
+
         if mime_type == 'Folder':
             msg += BotTheme('COUNT_SUB', COUNT_SUB=folders)
             msg += BotTheme('COUNT_FILE', COUNT_FILE=files)
+
         msg += BotTheme('COUNT_CC', COUNT_CC=tag)
+        await sendMessage(message, msg, photo='IMAGES')
     else:
-        msg = 'Send Gdrive link along with command or by replying to the link by command'
-        await sendMessage(message, msg, photo='IMAGES')
+        await sendMessage(message, 'Send Gdrive link along with command or by replying to the link by command',
+                          photo='IMAGES')

-bot.add_handler(MessageHandler(countNode, filters=command(
-    BotCommands.CountCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+bot.add_handler(MessageHandler(count_node, filters=command(BotCommands.CountCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
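count_node above keeps GoogleDriveHelper.count behind sync_to_async because the google-api-python-client calls are blocking; `await gd.count(link)` would try to await the plain tuple the method returns and raise a TypeError. Roughly what such a wrapper needs to do (a sketch, not the bot's actual bot_utils implementation):

    import asyncio
    from functools import partial

    async def sync_to_async(func, *args, wait=True, **kwargs):
        # Run a blocking callable in the default executor so the event
        # loop stays responsive while Drive API calls are in flight.
        loop = asyncio.get_running_loop()
        future = loop.run_in_executor(None, partial(func, *args, **kwargs))
        return await future if wait else future
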
diff --git a/bot/modules/gd_list.py b/bot/modules/gd_list.py
index d22cf8b64a..4c9bedb8db 100644
--- a/bot/modules/gd_list.py
+++ b/bot/modules/gd_list.py
@@ -1,7 +1,10 @@
 #!/usr/bin/env python3
-from random import choice
+from typing import List, Union
+
+import asyncio
 from pyrogram.handlers import MessageHandler, CallbackQueryHandler
 from pyrogram.filters import command, regex
+from pyrogram.errors import FloodWait

 from bot import LOGGER, bot, config_dict
 from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
@@ -12,55 +15,58 @@
 from bot.helper.ext_utils.bot_utils import sync_to_async, new_task, get_telegraph_list, checking_access
 from bot.helper.themes import BotTheme

-
-async def list_buttons(user_id, isRecursive=True):
+async def list_buttons(user_id: int, is_recursive: bool = True):
     buttons = ButtonMaker()
-    buttons.ibutton("Only Folders", f"list_types {user_id} folders {isRecursive}")
-    buttons.ibutton("Only Files", f"list_types {user_id} files {isRecursive}")
-    buttons.ibutton("Both", f"list_types {user_id} both {isRecursive}")
-    buttons.ibutton(f"{'✅️' if isRecursive else ''} Recursive", f"list_types {user_id} rec {isRecursive}")
+    buttons.ibutton("Only Folders", f"list_types {user_id} folders {is_recursive}")
+    buttons.ibutton("Only Files", f"list_types {user_id} files {is_recursive}")
+    buttons.ibutton("Both", f"list_types {user_id} both {is_recursive}")
+    buttons.ibutton(f"{'✅️' if is_recursive else ''} Recursive", f"list_types {user_id} rec {is_recursive}")
     buttons.ibutton("Cancel", f"list_types {user_id} cancel")
     return buttons.build_menu(2)

-
-async def _list_drive(key, message, user_id, item_type, isRecursive):
+async def _list_drive(key: str, message, user_id: int, item_type: str, is_recursive: bool):
     LOGGER.info(f"GDrive List: {key}")
     gdrive = GoogleDriveHelper()
-    telegraph_content, contents_no = await sync_to_async(gdrive.drive_list, key, isRecursive=isRecursive, itemType=item_type, userId=user_id)
+    try:
+        telegraph_content, contents_no = await sync_to_async(gdrive.drive_list, key, isRecursive=is_recursive, itemType=item_type, userId=user_id)
+    except Exception as e:
+        LOGGER.error(e)
+        await editMessage(message, "An error occurred while listing the drive.")
+        return
+
     if telegraph_content:
         try:
             button = await get_telegraph_list(telegraph_content)
         except Exception as e:
-            await editMessage(message, e)
+            await editMessage(message, str(e))
             return
         msg = BotTheme('LIST_FOUND', NO=contents_no, NAME=key)
         await editMessage(message, msg, button)
     else:
         await editMessage(message, BotTheme('LIST_NOT_FOUND', NAME=key))

-
-@new_task
 async def select_type(_, query):
     user_id = query.from_user.id
     message = query.message
     key = message.reply_to_message.text.split(maxsplit=1)[1].strip()
     data = query.data.split()
     if user_id != int(data[1]):
-        return await query.answer(text="Not Yours!", show_alert=True)
+        await query.answer(text="Not Yours!", show_alert=True)
+        return
     elif data[2] == 'rec':
+        is_recursive = data[3] != 'True'
+        buttons = await list_buttons(user_id, is_recursive)
         await query.answer()
-        isRecursive = not bool(eval(data[3]))
-        buttons = await list_buttons(user_id, isRecursive)
         return await editMessage(message, 'Choose drive list options:', buttons)
     elif data[2] == 'cancel':
         await query.answer()
         return await editMessage(message, "List has been canceled!")
     await query.answer()
     item_type = data[2]
-    isRecursive = eval(data[3])
+    is_recursive = data[3] == 'True'
     await editMessage(message, BotTheme('LIST_SEARCHING', NAME=key))
-    await _list_drive(key, message, user_id, item_type, isRecursive)
-
+    await _list_drive(key, message, user_id, item_type, is_recursive)

 async def drive_list(_, message):
     args = message.text.split() if message.text else ['/cmd']
diff --git a/bot/modules/gen_pyro_sess.py b/bot/modules/gen_pyro_sess.py
index 89e1ecfadd..6c690ecc09 100644
--- a/bot/modules/gen_pyro_sess.py
+++ b/bot/modules/gen_pyro_sess.py
@@ -1,13 +1,16 @@
 #!/usr/bin/env python3
 from time import time
+import asyncio
 from aiofiles.os import remove as aioremove
-from asyncio import sleep, wrap_future, Lock
+from asyncio import wrap_future, Lock
 from functools import partial
 from pyrogram import Client
+from pyrogram.types import Message
 from pyrogram.filters import command, user, text, private
 from pyrogram.handlers import MessageHandler
 from pyrogram.errors import SessionPasswordNeeded, FloodWait, PhoneNumberInvalid, ApiIdInvalid, PhoneCodeInvalid, PhoneCodeExpired, UsernameNotOccupied, ChatAdminRequired, PeerIdInvalid

 from bot import bot, LOGGER
 from bot.helper.ext_utils.bot_utils import new_thread, new_task
@@ -18,19 +21,18 @@
 session_lock = Lock()
 isStop = False

-@new_task
-async def genPyroString(client, message):
+async def genPyroString(client, message: Message):
     global isStop
     session_dict.clear()
     sess_msg = await sendMessage(message, """⌬ Pyrogram String Session Generator

Send your API_ID or APP_ID.
Get from https://my.telegram.org.
Timeout: 120s

Send /stop to Stop Process""")
     session_dict['message'] = sess_msg
     await wrap_future(invoke(client, message, 'API_ID'))
     if isStop:
         return
     async with session_lock:
@@ -38,14 +40,14 @@ async def genPyroString(client, message):
             api_id = int(session_dict['API_ID'])
         except Exception:
             return await editMessage(sess_msg, "APP_ID is Invalid.\n\n ⌬ Process Stopped.")
-    await sleep(1.5)
+    await asyncio.sleep(1.5)
     await editMessage(sess_msg, """⌬ Pyrogram String Session Generator

Send your API_HASH. Get from https://my.telegram.org.

Timeout: 120s

Send /stop to Stop Process""")
     await wrap_future(invoke(client, message, 'API_HASH'))
     if isStop:
         return
     async with session_lock:
@@ -53,18 +55,18 @@ async def genPyroString(client, message):
         if len(api_hash) <= 30:
             return await editMessage(sess_msg, "API_HASH is Invalid.\n\n ⌬ Process Stopped.")
     while True:
-        await sleep(1.5)
+        await asyncio.sleep(1.5)
         await editMessage(sess_msg, """⌬ Pyrogram String Session Generator

Send your Telegram Account's Phone number in International Format ( Including Country Code ). Example : +14154566376.

Timeout: 120s

Send /stop to Stop Process""")
         await wrap_future(invoke(client, message, 'PHONE_NO'))
         if isStop:
             return
         await editMessage(sess_msg, f"⌬ Verification Confirmation:\n\n Is {session_dict['PHONE_NO']} correct? (y/n/yes/no): \n\nSend y/yes (Yes) | n/no (No)")
         await wrap_future(invoke(client, message, 'CONFIRM_PHN'))
         if isStop:
             return
         async with session_lock:
@@ -82,14 +84,12 @@ async def genPyroString(client, message):
     await pyro_client.connect()
     try:
         user_code = await pyro_client.send_code(session_dict['PHONE_NO'])
-        await sleep(1.5)
+        await asyncio.sleep(1.5)
     except FloodWait as e:
         return await editMessage(sess_msg, f"Floodwait of {e.value} Seconds. Retry Again\n\n ⌬ Process Stopped.")
-    except ApiIdInvalid:
-        return await editMessage(sess_msg, "API_ID and API_HASH are Invalid. Retry Again\n\n ⌬ Process Stopped.")
-    except PhoneNumberInvalid:
-        return await editMessage(sess_msg, "Phone Number is Invalid. Retry Again\n\n ⌬ Process Stopped.")
-    await sleep(1.5)
+    except (ApiIdInvalid, PhoneNumberInvalid):
+        return await editMessage(sess_msg, "API_ID, API_HASH, or Phone Number are Invalid. Retry Again\n\n ⌬ Process Stopped.")
+    await asyncio.sleep(1.5)
     await editMessage(sess_msg, """⌬ Pyrogram String Session Generator

OTP has been sent to your Phone Number, Enter OTP in 1 2 3 4 5 format. ( Space between each Digits )

Timeout: 120s

Send /stop to Stop Process""")
     await wrap_future(invoke(client, message, 'OTP'))
     if isStop:
         return
     async with session_lock:
@@ -109,7 +109,7 @@ async def genPyroString(client, message):
     except PhoneCodeExpired:
         return await editMessage(sess_msg, " Input OTP has Expired.\n\n ⌬ Process Stopped.")
     except SessionPasswordNeeded:
-        await sleep(1.5)
+        await asyncio.sleep(1.5)
         await editMessage(sess_msg, f"""⌬ Pyrogram String Session Generator

Account is being Protected via Two-Step Verification. Send your Password below.
@@ -118,7 +118,7 @@ async def genPyroString(client, message):

Password Hint : {await pyro_client.get_password_hint()}

Send /stop to Stop Process""")
         await wrap_future(invoke(client, message, 'TWO_STEP_PASS'))
         if isStop:
             return
         async with session_lock:
@@ -140,9 +140,8 @@ async def genPyroString(client, message):
         await aioremove(f'WZML-X-{message.from_user.id}.session')
         await aioremove(f'WZML-X-{message.from_user.id}.session-journal')
     except: pass

-
-async def set_details(_, message, newkey):
+async def set_details(_, message: Message, newkey):
     global isStop
     user_id = message.from_user.id
     value = message.text
@@ -154,21 +153,19 @@
         isStop = True
         return await editMessage(session_dict['message'], '⌬ Process Stopped')

-
 @new_thread
-async def invoke(client, message, key):
+async def invoke(client, message: Message, key):
     global isStop
     user_id = message.from_user.id
     session_dict[user_id] = True
     start_time = time()
     handler = client.add_handler(MessageHandler(partial(set_details, newkey=key), filters=user(user_id) & text & private), group=-1)
     while session_dict[user_id]:
-        await sleep(0.5)
+        await asyncio.sleep(0.5)
         if time() - start_time > 120:
             session_dict[user_id] = False
             await editMessage(message, "⌬ Process Stopped")
             isStop = True
     client.remove_handler(*handler)

-bot.add_handler(MessageHandler(genPyroString, filters=command('exportsession') & private & CustomFilters.sudo))
\ No newline at end of file
+bot.add_handler(MessageHandler(genPyroString, filters=command('exportsession') & private & CustomFilters.sudo))
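invoke() above registers a temporary pyrogram handler and then polls a flag every 0.5 s until the user replies or 120 s pass. The same handshake can be expressed without polling by parking the waiter on an asyncio primitive; a sketch of the idea under the same timeout, using asyncio.Event (illustrative, not the module's code):

    import asyncio

    async def wait_for_reply(event: asyncio.Event, timeout: float = 120.0) -> bool:
        # set() is called from the message handler once the reply arrives;
        # wait_for() releases the waiter immediately instead of on the next
        # poll tick, and raises TimeoutError after `timeout` seconds.
        try:
            await asyncio.wait_for(event.wait(), timeout=timeout)
            return True
        except asyncio.TimeoutError:
            return False
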
diff --git a/bot/modules/images.py b/bot/modules/images.py
index 29bab52293..af18cbba2e 100644
--- a/bot/modules/images.py
+++ b/bot/modules/images.py
@@ -1,10 +1,15 @@
 #!/usr/bin/env python3
-from asyncio import sleep as asleep
-from aiofiles.os import path as aiopath, remove as aioremove, mkdir
-from telegraph import upload_file
+import asyncio
+import os
+import re
+import aiofiles
+import aiohttp
+import telegraph
 from pyrogram.handlers import MessageHandler, CallbackQueryHandler
 from pyrogram.filters import command, regex
+from pyrogram.errors import FloodWait

 from bot import bot, LOGGER, config_dict, DATABASE_URL
 from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage
@@ -14,68 +19,49 @@
 from bot.helper.ext_utils.db_handler import DbManger
 from bot.helper.telegram_helper.button_build import ButtonMaker

-@new_task
 async def picture_add(_, message):
-    resm = message.reply_to_message
-    editable = await sendMessage(message, "Fetching Input ...")
-    if len(message.command) > 1 or resm and resm.text:
-        msg_text = resm.text if resm else message.command[1]
-        if msg_text.startswith("http"):
-            pic_add = msg_text.strip()
-            await editMessage(editable, f"Adding your Link : {pic_add}")
-        else:
-            return await editMessage(editable, "Not a Valid Link, Must Start with 'http'")
-    elif resm and resm.photo:
-        if not (resm.photo and resm.photo.file_size <= 5242880*2):
-            return await editMessage(editable, "Media is Not Supported! Only Photos!!")
-        try:
-            photo_dir = await resm.download()
-            await editMessage(editable, "Now, Uploading to graph.org, Please Wait...")
-            await asleep(1)
-            pic_add = f'https://graph.org{upload_file(photo_dir)[0]}'
-            LOGGER.info(f"Telegraph Link : {pic_add}")
-        except Exception as e:
-            LOGGER.error(f"Images Error: {str(e)}")
-            await editMessage(editable, str(e))
-        finally:
-            await aioremove(photo_dir)
+    editable = await sendMessage(message, "Fetching Input...")
+    args = message.command[1:] if message.command else []
+    if args and re.match(r'^https?://', args[0]):
+        pic_add = args[0].strip()
+    elif message.reply_to_message and message.reply_to_message.photo:
+        pic_add = await download_image(message.reply_to_message)
+        if not pic_add:
+            return await editMessage(editable, "Failed to download the replied image.")
     else:
-        help_msg = "By Replying to Link (Telegra.ph or DDL):"
-        help_msg += f"\n/{BotCommands.AddImageCommand}" + " {link}" + "\n"
-        help_msg += "\nBy Replying to Photo on Telegram:"
-        help_msg += f"\n/{BotCommands.AddImageCommand}" + " {photo}" + ""
-        return await editMessage(editable, help_msg)
+        return await editMessage(editable, "Invalid input. Use /addimage [image_url] or reply to an image.")
+
     config_dict['IMAGES'].append(pic_add)
     if DATABASE_URL:
         await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
-    await asleep(1.5)
-    await editMessage(editable, f"Successfully Added to Images List!\n\n• Total Images : {len(config_dict['IMAGES'])}")
+    await editMessage(editable, f"Successfully added to Images List!\n• Total Images: {len(config_dict['IMAGES'])}")


 async def pictures(_, message):
-    user_id = message.from_user.id
     if not config_dict['IMAGES']:
-        await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}")
-    else:
-        to_edit = await sendMessage(message, "Generating Grid of your Images...")
-        buttons = ButtonMaker()
-        buttons.ibutton("<<", f"images {user_id} turn -1")
-        buttons.ibutton(">>", f"images {user_id} turn 1")
-        buttons.ibutton("Remove Image", f"images {user_id} remov 0")
-        buttons.ibutton("Close", f"images {user_id} close")
-        buttons.ibutton("Remove All", f"images {user_id} removall", 'footer')
-        await deleteMessage(to_edit)
-        await sendMessage(message, f'🌄 Image No. : 1 / {len(config_dict["IMAGES"])}', buttons.build_menu(2), config_dict['IMAGES'][0])
+        await sendMessage(message, "No photos to show! Add photos by /addimage command.")
+        return
+
+    to_edit = await sendMessage(message, "Generating grid of your images...")
+    buttons = ButtonMaker()
+    buttons.ibutton("<<", f"images {message.from_user.id} turn -1")
+    buttons.ibutton(">>", f"images {message.from_user.id} turn 1")
+    buttons.ibutton("Remove Image", f"images {message.from_user.id} remov 0")
+    buttons.ibutton("Close", f"images {message.from_user.id} close")
+    buttons.ibutton("Remove All", f"images {message.from_user.id} removall", 'footer')
+    await deleteMessage(to_edit)
+    await sendMessage(message, f'🌄 Image No. : 1 / {len(config_dict["IMAGES"])}', buttons.build_menu(2), config_dict['IMAGES'][0])

 @new_task
 async def pics_callback(_, query):
-    message = query.message
     user_id = query.from_user.id
     data = query.data.split()
     if user_id != int(data[1]):
         await query.answer(text="Not Authorized User!", show_alert=True)
         return
+
     if data[2] == "turn":
         await query.answer()
         ind = handleIndex(int(data[3]), config_dict['IMAGES'])
@@ -87,15 +73,15 @@ async def pics_callback(_, query):
         buttons.ibutton("Remove Image", f"images {data[1]} remov {ind}")
         buttons.ibutton("Close", f"images {data[1]} close")
         buttons.ibutton("Remove All", f"images {data[1]} removall", 'footer')
-        await editMessage(message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
+        await editMessage(query.message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
     elif data[2] == "remov":
         config_dict['IMAGES'].pop(int(data[3]))
         if DATABASE_URL:
             await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
-        query.answer("Image Successfully Deleted", show_alert=True)
+        await query.answer("Image Successfully Deleted", show_alert=True)
         if len(config_dict['IMAGES']) == 0:
             await deleteMessage(query.message)
-            await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}")
+            await sendMessage(query.message, "No photos to show! Add photos by /addimage command.")
             return
         ind = int(data[3])+1
         ind = len(config_dict['IMAGES']) - abs(ind) if ind < 0 else ind
@@ -106,19 +92,39 @@ async def pics_callback(_, query):
         buttons.ibutton("Remove Image", f"images {data[1]} remov {ind}")
         buttons.ibutton("Close", f"images {data[1]} close")
         buttons.ibutton("Remove All", f"images {data[1]} removall", 'footer')
-        await editMessage(message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
+        await editMessage(query.message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
     elif data[2] == 'removall':
         config_dict['IMAGES'].clear()
         if DATABASE_URL:
             await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
         await query.answer("All Images Successfully Deleted", show_alert=True)
-        await sendMessage(message, f"No Images to Show ! Add by /{BotCommands.AddImageCommand}")
-        await deleteMessage(message)
+        await sendMessage(query.message, "No images to show! Add photos by /addimage command.")
+        await deleteMessage(query.message)
     else:
         await query.answer()
-        await deleteMessage(message)
-        if message.reply_to_message:
-            await deleteMessage(message.reply_to_message)
+        await deleteMessage(query.message)
+
+
+async def download_image(message):
+    try:
+        file_path = await message.download()
+    except FloodWait as e:
+        await asyncio.sleep(e.value)
+        file_path = await message.download()
+    except Exception as e:
+        LOGGER.error(f"Error downloading image: {str(e)}")
+        return None
+
+    file_name = f"{message.photo.file_unique_id}.jpg"
+    new_file_path = os.path.join("temp_images", file_name)
+    os.makedirs(os.path.dirname(new_file_path), exist_ok=True)
+    async with aiofiles.open(file_path, 'rb') as f:
+        async with aiofiles.open(new_file_path, 'wb') as out_f:
+            while content := await f.read(4096):
+                await out_f.write(content)
+    os.remove(file_path)
+    return new_file_path

 bot.add_handler(MessageHandler(picture_add, filters=command(BotCommands.AddImageCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
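The imdb.py rewrite that follows swaps the Cinemagoer client for hand-rolled string indexing over the IMDb search page, which is brittle against any markup change. The original library calls can instead be kept and made non-blocking with a thread offload; a minimal sketch reusing the Cinemagoer API that the removed code already depends on (asyncio.to_thread requires Python 3.9+):

    import asyncio
    from imdb import Cinemagoer

    ia = Cinemagoer()

    async def search_titles(title: str, limit: int = 10):
        # search_movie is blocking network I/O; push it onto a worker thread.
        movies = await asyncio.to_thread(ia.search_movie, title, results=limit)
        return [(m.movieID, m.get("title"), m.get("year")) for m in movies]
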
{movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}") - else: - movies = get_poster(title, bulk=True) - if not movies: - return editMessage("No Results Found, Try Again or Use Title ID", k) - for movie in movies: # Refurbished Soon !! - buttons.ibutton(f"🎬 {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movie.movieID}") - buttons.ibutton("🚫 Close 🚫", f"imdb {user_id} close") - await editMessage(k, 'Here What I found on IMDb.com', buttons.build_menu(1)) - else: - await sendMessage(message, 'Send Movie / TV Series Name along with /imdb Command or send IMDB URL') - - -def get_poster(query, bulk=False, id=False, file=None): - if not id: - query = (query.strip()).lower() - title = query - year = findall(r'[1-2]\d{3}$', query, IGNORECASE) - if year: - year = list_to_str(year[:1]) - title = (query.replace(year, "")).strip() - elif file is not None: - year = findall(r'[1-2]\d{3}', file, IGNORECASE) - if year: - year = list_to_str(year[:1]) - else: - year = None - movieid = imdb.search_movie(title.lower(), results=10) - if not movieid: - return None - if year: - filtered=list(filter(lambda k: str(k.get('year')) == str(year), movieid)) - if not filtered: - filtered = movieid - else: - filtered = movieid - movieid=list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered)) - if not movieid: - movieid = filtered - if bulk: - return movieid - movieid = movieid[0].movieID - else: - movieid = query - movie = imdb.get_movie(movieid) - if movie.get("original air date"): - date = movie["original air date"] - elif movie.get("year"): - date = movie.get("year") - else: - date = "N/A" - plot = movie.get('plot') - if plot and len(plot) > 0: - plot = plot[0] - else: - plot = movie.get('plot outline') - if plot and len(plot) > 300: - plot = f"{plot[:300]}..." 
- return { - 'title': movie.get('title'), - 'trailer': movie.get('videos'), - 'votes': movie.get('votes'), - "aka": list_to_str(movie.get("akas")), - "seasons": movie.get("number of seasons"), - "box_office": movie.get('box office'), - 'localized_title': movie.get('localized title'), - 'kind': movie.get("kind"), - "imdb_id": f"tt{movie.get('imdbID')}", - "cast": list_to_str(movie.get("cast")), - "runtime": list_to_str([get_readable_time(int(run) * 60) for run in movie.get("runtimes", "0")]), - "countries": list_to_hash(movie.get("countries"), True), - "certificates": list_to_str(movie.get("certificates")), - "languages": list_to_hash(movie.get("languages")), - "director": list_to_str(movie.get("director")), - "writer":list_to_str(movie.get("writer")), - "producer":list_to_str(movie.get("producer")), - "composer":list_to_str(movie.get("composer")) , - "cinematographer":list_to_str(movie.get("cinematographer")), - "music_team": list_to_str(movie.get("music department")), - "distributors": list_to_str(movie.get("distributors")), - 'release_date': date, - 'year': movie.get('year'), - 'genres': list_to_hash(movie.get("genres"), emoji=True), - 'poster': movie.get('full-size cover url'), - 'plot': plot, - 'rating': str(movie.get("rating"))+" / 10", - 'url':f'https://www.imdb.com/title/tt{movieid}', - 'url_cast':f'https://www.imdb.com/title/tt{movieid}/fullcredits#cast', - 'url_releaseinfo':f'https://www.imdb.com/title/tt{movieid}/releaseinfo', - } +def get_imdb_id(url: str) -> str: + if "imdb.com/title/tt" in url: + return url.split("imdb.com/title/tt")[-1] + return "" -def list_to_str(k): - if not k: - return "" - elif len(k) == 1: - return str(k[0]) - elif LIST_ITEMS: - k = k[:int(LIST_ITEMS)] - return ' '.join(f'{elem},' for elem in k)[:-1]+' ...' - else: - return ' '.join(f'{elem},' for elem in k)[:-1] +async def get_imdb_data(query: str) -> Union[Dict, None]: + if "http" not in query and "https" not in query: + query = f"https://www.imdb.com/find?q={query}&s=tt&ttype=ft&ref_=fn_ft" + try: + response = requests.get(query) + if response.status_code == 200: + html_content = response.text + start_index = html_content.index('"poster":"') + len('"poster":"') + end_index = html_content.index('","image"', start_index) + poster_url = html_content[start_index:end_index].replace("\\/", "/") + start_index = html_content.index('"title":"') + len('"title":"') + end_index = html_content.index('","year"', start_index) + title = html_content[start_index:end_index] + start_index = html_content.index('"year":"') + len('"year":"') + end_index = html_content.index('","id"', start_index) + year = html_content[start_index:end_index] + start_index = html_content.index('"id":"') + len('"id":"') + end_index = html_content.index('","type"', start_index) + imdb_id = html_content[start_index:end_index] + return { + "poster": poster_url, + "title": title, + "year": year, + "imdb_id": imdb_id, + } + except Exception as e: + LOGGER.error(e) + return None -def list_to_hash(k, flagg=False, emoji=False): - listing = "" - if not k: - return "" - elif len(k) == 1: - if not flagg: - if emoji: - return str(IMDB_GENRE_EMOJI.get(k[0], '')+" #"+k[0].replace(" ", "_").replace("-", "_")) - return str("#"+k[0].replace(" ", "_").replace("-", "_")) - try: - conflag = (conn.get(name=k[0])).flag - return str(f"{conflag} #" + k[0].replace(" ", "_").replace("-", "_")) - except AttributeError: - return str("#"+k[0].replace(" ", "_").replace("-", "_")) - elif LIST_ITEMS: - k = k[:int(LIST_ITEMS)] - for elem in k: - ele = elem.replace(" ", 
"_").replace("-", "_") - if flagg: - try: - conflag = (conn.get(name=elem)).flag - listing += f'{conflag} ' - except AttributeError: - pass - if emoji: - listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} " - listing += f'#{ele}, ' - return f'{listing[:-2]}' +async def imdb_search(client, message): + if " " not in message.text: + await sendMessage(message, 'Send Movie / TV Series Name along with /imdb Command or send IMDB URL') + return + query = message.text.split(" ", 1)[1] + user_id = message.from_user.id + buttons = ButtonMaker() + if "http" in query or "https" in query: + imdb_id = get_imdb_id(query) + if not imdb_id: + await sendMessage(message, "Invalid IMDB URL") + return + movie_data = await get_imdb_data(f"https://www.imdb.com/title/{imdb_id}/") + if not movie_data: + await sendMessage(message, "No results found") + return else: - for elem in k: - ele = elem.replace(" ", "_").replace("-", "_") - if flagg: - conflag = (conn.get(name=elem)).flag - listing += f'{conflag} ' - listing += f'#{ele}, ' - return listing[:-2] - + movie_data = await get_imdb_data(f"https://www.imdb.com/find?q={query}&s=tt&ttype=ft&ref_=fn_ft") + if not movie_data: + await sendMessage(message, "No results found") + return + buttons.ibutton(f"🎬 {movie_data['title']} ({movie_data['year']})", f"imdb {user_id} movie {movie_data['imdb_id']}") + buttons.ibutton("🚫 Close 🚫", f"imdb {user_id} close") + await editMessage(message, 'Here What I found on IMDb.com', buttons.build_menu(1)) -async def imdb_callback(_, query): +async def imdb_callback(client, query): message = query.message user_id = query.from_user.id data = query.data.split() if user_id != int(data[1]): await query.answer("Not Yours!", show_alert=True) + return elif data[2] == "movie": await query.answer() - imdb = get_poster(query=data[3], id=True) + movie_id = data[3] + movie_data = await get_imdb_data(f"https://www.imdb.com/title/{movie_id}/") + if not movie_data: + await query.answer("No results found", show_alert=True) + return buttons = [] - if imdb['trailer']: - if isinstance(imdb['trailer'], list): - buttons.append([InlineKeyboardButton("▶️ IMDb Trailer ", url=str(imdb['trailer'][-1]))]) - imdb['trailer'] = list_to_str(imdb['trailer']) - else: buttons.append([InlineKeyboardButton("▶️ IMDb Trailer ", url=str(imdb['trailer']))]) - buttons.append([InlineKeyboardButton("🚫 Close 🚫", callback_data=f"imdb {user_id} close")]) - template = '' - #if int(data[1]) in user_data and user_data[int(data[1])].get('imdb_temp'): - # template = user_data[int(data[1])].get('imdb_temp') - #if not template: - template = config_dict['IMDB_TEMPLATE'] - if imdb and template != "": - cap = template.format( - title = imdb['title'], - trailer = imdb['trailer'], - votes = imdb['votes'], - aka = imdb["aka"], - seasons = imdb["seasons"], - box_office = imdb['box_office'], - localized_title = imdb['localized_title'], - kind = imdb['kind'], - imdb_id = imdb["imdb_id"], - cast = imdb["cast"], - runtime = imdb["runtime"], - countries = imdb["countries"], - certificates = imdb["certificates"], - languages = imdb["languages"], - director = imdb["director"], - writer = imdb["writer"], - producer = imdb["producer"], - composer = imdb["composer"], - cinematographer = imdb["cinematographer"], - music_team = imdb["music_team"], - distributors = imdb["distributors"], - release_date = imdb['release_date'], - year = imdb['year'], - genres = imdb['genres'], - poster = imdb['poster'], - plot = imdb['plot'], - rating = imdb['rating'], - url = imdb['url'], - url_cast = imdb['url_cast'], - 
url_releaseinfo = imdb['url_releaseinfo'], - **locals() - ) - else: - cap = "No Results" - if imdb.get('poster'): + if movie_data.get("poster"): try: - await bot.send_photo(chat_id=query.message.reply_to_message.chat.id, caption=cap, photo=imdb['poster'], reply_to_message_id=query.message.reply_to_message.id, reply_markup=InlineKeyboardMarkup(buttons)) + await bot.send_photo( + chat_id=query.message.reply_to_message.chat.id, + caption=movie_data["title"], + photo=movie_data["poster"], + reply_to_message_id=query.message.reply_to_message.id, + reply_markup=InlineKeyboardMarkup(buttons), + ) except (MediaEmpty, PhotoInvalidDimensions, WebpageMediaEmpty): - poster = imdb.get('poster').replace('.jpg', "._V1_UX360.jpg") - await sendMessage(message.reply_to_message, cap, InlineKeyboardMarkup(buttons), poster) + await sendMessage( + message.reply_to_message, + movie_data["title"], + InlineKeyboardMarkup(buttons), + movie_data["poster"], + ) else: - await sendMessage(message.reply_to_message, cap, InlineKeyboardMarkup(buttons), 'https://telegra.ph/file/5af8d90a479b0d11df298.jpg') + await sendMessage( + message.reply_to_message, + movie_data["title"], + InlineKeyboardMarkup(buttons), + 'https://telegra.ph/file/5af8d90a479b0d11df298.jpg', + ) await message.delete() else: await query.answer() await query.message.delete() await query.message.reply_to_message.delete() - bot.add_handler(MessageHandler(imdb_search, filters=command(BotCommands.IMDBCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted)) bot.add_handler(CallbackQueryHandler(imdb_callback, filters=regex(r'^imdb'))) diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py index 5f78900481..f95d319413 100644 --- a/bot/modules/mediainfo.py +++ b/bot/modules/mediainfo.py @@ -1,13 +1,15 @@ #!/usr/bin/env python3 -import aiohttp -from re import search as re_search -from shlex import split as ssplit -from aiofiles import open as aiopen -from aiofiles.os import remove as aioremove, path as aiopath, mkdir -from os import path as ospath, getcwd +import asyncio +import os +import re +import shlex +from pathlib import Path -from pyrogram.handlers import MessageHandler +import aiohttp +import aiofiles +from pyrogram.handlers import MessageHandler from pyrogram.filters import command +from pyrogram.errors import UserIsBlocked, MessageNotModified, ChatWriteForbidden from bot import LOGGER, bot, config_dict from bot.helper.telegram_helper.filters import CustomFilters @@ -16,81 +18,28 @@ from bot.helper.ext_utils.bot_utils import cmd_exec from bot.helper.ext_utils.telegraph_helper import telegraph - -async def gen_mediainfo(message, link=None, media=None, mmsg=None): - temp_send = await sendMessage(message, 'Generating MediaInfo...') +MEDIAINFO_PATH = "Mediainfo/" + +async def download_file(session, url, file_path): + async with session.get(url, headers={"user-agent": "Mozilla/5.0"}) as response: + if response.status != 200: + raise Exception(f"Failed to download file: {response.status}") + async with aiofiles.open(file_path, "wb") as f: + while True: + chunk = await response.content.read(10000000) + if not chunk: + break + await f.write(chunk) + +async def generate_mediainfo(message, link=None, media=None, mmsg=None): + temp_send = await sendMessage(message, "Generating MediaInfo...") try: - path = "Mediainfo/" - if not await aiopath.isdir(path): - await mkdir(path) if link: - filename = re_search(".+/(.+)", link).group(1) - des_path = ospath.join(path, filename) - headers = {"user-agent":"Mozilla/5.0 (Linux; Android 12; 2201116PI) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36"}
+            file_name = re.search(".+/(.+)", link).group(1)
+            file_path = Path(MEDIAINFO_PATH) / file_name
             async with aiohttp.ClientSession() as session:
-                async with session.get(link, headers=headers) as response:
-                    async with aiopen(des_path, "wb") as f:
-                        async for chunk in response.content.iter_chunked(10000000):
-                            await f.write(chunk)
-                            break
+                await download_file(session, link, file_path)
         elif media:
-            des_path = ospath.join(path, media.file_name)
+            file_path = Path(MEDIAINFO_PATH) / media.file_name
             if media.file_size <= 50000000:
-                await mmsg.download(ospath.join(getcwd(), des_path))
-            else:
-                async for chunk in bot.stream_media(media, limit=5):
-                    async with aiopen(des_path, "ab") as f:
-                        await f.write(chunk)
-        stdout, _, _ = await cmd_exec(ssplit(f'mediainfo "{des_path}"'))
-        tc = f"<h4>📌 {ospath.basename(des_path)}</h4><br><br>"
-        if len(stdout) != 0:
-            tc += parseinfo(stdout)
-    except Exception as e:
-        LOGGER.error(e)
-        await editMessage(temp_send, f"MediaInfo Stopped due to {str(e)}")
-    finally:
-        await aioremove(des_path)
-    link_id = (await telegraph.create_page(title='MediaInfo X', content=tc))["path"]
-    await temp_send.edit(f"MediaInfo:\n\n➲ Link : https://graph.org/{link_id}", disable_web_page_preview=False)
-
-
-section_dict = {'General': '🗒', 'Video': '🎞', 'Audio': '🔊', 'Text': '🔠', 'Menu': '🗃'}
-def parseinfo(out):
-    tc = ''
-    trigger = False
-    for line in out.split('\n'):
-        for section, emoji in section_dict.items():
-            if line.startswith(section):
-                trigger = True
-                if not line.startswith('General'):
-                    tc += '</pre><br>'
-                tc += f"<h4>{emoji} {line.replace('Text', 'Subtitle')}</h4>"
-                break
-        if trigger:
-            tc += '<pre>'
-            trigger = False
-        else:
-            tc += line + '\n'
-    tc += '</pre>'
-    return tc
-
-
-async def mediainfo(_, message):
-    rply = message.reply_to_message
-    help_msg = "By replying to media:"
-    help_msg += f"\n/{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]}" + " {media}" + ""
-    help_msg += "\n\nBy reply/sending download link:"
-    help_msg += f"\n/{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]}" + " {link}" + ""
-    if len(message.command) > 1 or rply and rply.text:
-        link = rply.text if rply else message.command[1]
-        return await gen_mediainfo(message, link)
-    elif rply:
-        file = next((i for i in [rply.document, rply.video, rply.audio, rply.voice,
-                                 rply.animation, rply.video_note] if i is not None), None)
-        if not file:
-            return await sendMessage(message, help_msg)
-        return await gen_mediainfo(message, None, file, rply)
-    else:
-        return await sendMessage(message, help_msg)
-
-bot.add_handler(MessageHandler(mediainfo, filters=command(BotCommands.MediaInfoCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+
diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py
index 42ce65b952..7da3569a68 100644
--- a/bot/modules/mirror_leech.py
+++ b/bot/modules/mirror_leech.py
@@ -3,6 +3,7 @@
 from html import escape
 from base64 import b64encode
 from re import match as re_match
+from urllib.parse import unquote
 from asyncio import sleep
 from aiofiles import open as aiopen
 from aiofiles.os import path as aiopath
@@ -223,9 +224,9 @@ async def __run_multi():
         await delete_links(message)
         return
 
-    org_link = None
+    org_link, headers, multiAria = None, '', []
     if link:
-        LOGGER.info(link)
+        LOGGER.info(f"Link: {link}")
         org_link = link
 
     if not is_mega_link(link) and not isQbit and not is_magnet(link) and not is_rclone_path(link) \
@@ -235,6 +236,14 @@ async def __run_multi():
             process_msg = await sendMessage(message, f"Processing: {link}")
             try:
                 link = await sync_to_async(direct_link_generator, link)
+                if isinstance(link, list):
+                    link, headers = link
+                if isinstance(link, dict):
+                    multiAria = [link, headers, unquote(org_link.rstrip('/').rsplit('/', 1)[1])]
+                    link = list(multiAria[0].keys())[0]
+                    if (folder_name := multiAria[0][link]):
+                        path += "/" + folder_name
+                    multiAria[0].pop(link)
                 LOGGER.info(f"Generated link: {link}")
                 await editMessage(process_msg, f"Generated link: {link}")
             except DirectDownloadLinkException as e:
@@ -318,7 +327,8 @@ async def __run_multi():
         return
 
     listener = MirrorLeechListener(message, compress, extract, isQbit, isLeech, tag, select, seed,
-                                   sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link, source_url=org_link if org_link else link)
+                                   sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link,
+                                   source_url=org_link if org_link else link, multiAria=multiAria)
 
     if file_ is not None:
         await delete_links(message)
@@ -347,10 +357,8 @@ async def __run_multi():
             pssw = args['-p'] or args['-pass']
             if ussr or pssw:
                 auth = f"{ussr}:{pssw}"
-                auth = "Basic " + b64encode(auth.encode()).decode('ascii')
-            else:
-                auth = ''
-            await add_aria2c_download(link, path, listener, name, auth, ratio, seed_time)
+                headers = f"authorization: Basic {b64encode(auth.encode()).decode('ascii')}"
+            await add_aria2c_download(link, path, listener, name, headers, ratio, seed_time)
 
     await delete_links(message)
 
diff --git a/bot/modules/save_msg.py b/bot/modules/save_msg.py
index fdb65076e4..5385af1cd0 100644
--- a/bot/modules/save_msg.py
+++ b/bot/modules/save_msg.py
@@ -1,26 +1,39 @@
 #!/usr/bin/env python3
-from pyrogram.types import InlineKeyboardMarkup
+from pyrogram.types import 
InlineKeyboardMarkup, CallbackQuery
 from pyrogram.handlers import CallbackQueryHandler
 from pyrogram.filters import regex
 from asyncio import sleep
+from typing import Optional
 
 from bot import bot, bot_name, user_data
 
-async def save_message(_, query):
-    usr = query.from_user.id
-    user_dict = user_data.get(usr, {})
+async def save_message(_, query: CallbackQuery) -> None:
+    """Save the current message/media to the user's dump chat or own chat."""
+
+    user_id = query.from_user.id
+    user_dict = user_data.get(user_id, {})
+
     if query.data == "save":
-        if user_dict.get('save_mode'):
-            usr = next(iter(user_dict.get('ldump', {}).values()))
         try:
-            await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
-            await query.answer("Message/Media Successfully Saved !", show_alert=True)
-        except:
-            if user_dict.get('save_mode'):
-                await query.answer('Make Bot as Admin and give Post Permissions and Try Again', show_alert=True)
+            save_mode = user_dict.get('save_mode')
+            if save_mode:
+                user_to_save_to = next(iter(user_dict.get('ldump', {}).values()))
             else:
-                await query.answer(url=f"https://t.me/{bot_name}?start=start")
+                user_to_save_to = user_id
+        except StopIteration:
+            await query.answer("No dump chat found to save the message to.", show_alert=True)
+            return
+
+        try:
+            reply_markup = query.message.reply_markup
+            keyboard = InlineKeyboardMarkup(btns) if reply_markup and (btns := reply_markup.inline_keyboard[:-1]) else None
+            await query.message.copy(user_to_save_to, reply_markup=keyboard)
+            await query.answer("Message/Media successfully saved!", show_alert=True)
+        except Exception:
+            if save_mode:
+                await query.answer("Make the bot an admin and give it post permissions, then try again.", show_alert=True)
+            else:
+                await query.answer(url=f"https://t.me/{bot_name}?start=start")
             await sleep(1)
-            await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
-
-bot.add_handler(CallbackQueryHandler(save_message, filters=regex(r"^save")))
+            await query.message.copy(user_to_save_to, reply_markup=keyboard)
+
+
+bot.add_handler(CallbackQueryHandler(save_message, filters=regex(r"^save")))
diff --git a/bot/modules/speedtest.py b/bot/modules/speedtest.py
index 11e9753eaf..e6ae5a25dd 100644
--- a/bot/modules/speedtest.py
+++ b/bot/modules/speedtest.py
@@ -1,9 +1,16 @@
 #!/usr/bin/env python3
-from speedtest import Speedtest
+import asyncio
+import io
+import os
+from urllib.parse import urlparse
+
+import aiohttp
+import requests
+from PIL import Image
 from pyrogram.handlers import MessageHandler
 from pyrogram.filters import command
+from speedtest import Speedtest
 
-from bot import bot, LOGGER
+from bot import bot, LOGGER, SUPPORT_CHAT, WHITELIST_CHATS
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, editMessage
@@ -45,11 +52,39 @@ async def speedtest(_, message):
 ┖ ISP Rating: {result['client']['isprating']}
 '''
     try:
-        pho = await sendMessage(message, string_speed, photo=path)
+        # Download the image using aiohttp
+        async with aiohttp.ClientSession() as session:
+            async with session.get(path) as resp:
+                if resp.status != 200:
+                    LOGGER.error(f"Failed to download image: {resp.status}")
+                    return
+                jpg_data = await resp.read()
+
+        # Save the image temporarily
+        temp_file = "temp_image.jpg"
+        with open(temp_file, "wb") as f:
+            f.write(jpg_data)
+
+        # Convert the image to a Telegram-friendly format
+        image = Image.open(temp_file)
+        img_bytes = await convert_image_to_telegram_format(image)
+
+        # Send the message with the image
+        pho = await sendMessage(message, string_speed, photo=img_bytes)
+        os.remove(temp_file)
         await deleteMessage(speed)
     except Exception as e:
         LOGGER.error(str(e))
         pho = await editMessage(speed, string_speed)
+
+
+async def convert_image_to_telegram_format(image):
+    """Re-encode the PIL image into an in-memory JPEG suitable for Telegram."""
+    # Image.tobytes() would return raw pixel data, not an encoded image file,
+    # so the image is saved as JPEG into a BytesIO buffer off the event loop.
+    img_data = io.BytesIO()
+    await asyncio.get_running_loop().run_in_executor(None, image.save, img_data, "JPEG")
+    img_data.seek(0)
+    return img_data
+
+
 bot.add_handler(MessageHandler(speedtest, filters=command(
     BotCommands.SpeedCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/status.py b/bot/modules/status.py
index 10d53836e4..5763288777 100644
--- a/bot/modules/status.py
+++ b/bot/modules/status.py
@@ -1,60 +1,71 @@
 #!/usr/bin/env python3
 from pyrogram.handlers import MessageHandler, CallbackQueryHandler
 from pyrogram.filters import command, regex
+from pyrogram.types import Message, CallbackQuery
 from psutil import cpu_percent, virtual_memory, disk_usage
 from time import time
 from asyncio import sleep
-
-from bot import bot_cache, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, config_dict, bot
+from bot import bot_cache, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, config_dict, bot, LOGGER
 from bot.helper.telegram_helper.filters import CustomFilters
 from bot.helper.telegram_helper.bot_commands import BotCommands
 from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, auto_delete_message, sendStatusMessage, user_info, update_all_messages, delete_all_messages
 from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time, turn_page, setInterval, new_task
 from bot.helper.themes import BotTheme
 
-@new_task
-async def mirror_status(_, message):
-    async with download_dict_lock:
-        count = len(download_dict)
-    if count == 0:
-        currentTime = get_readable_time(time() - botStartTime)
-        free = get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free)
-        msg = BotTheme('NO_ACTIVE_DL', cpu=cpu_percent(), free=free, free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1),
-                       ram=virtual_memory().percent, uptime=currentTime)
-        reply_message = await sendMessage(message, msg)
-        await auto_delete_message(message, reply_message)
-    else:
-        await sendStatusMessage(message)
-        await deleteMessage(message)
-    async with status_reply_dict_lock:
-        if Interval:
-            Interval[0].cancel()
-            Interval.clear()
-        Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages))
-
-
-@new_task
-async def status_pages(_, query):
-    user_id = query.from_user.id
-    data = query.data.split()
-    if data[1] == 'ref':
-        bot_cache.setdefault('status_refresh', {})
-        if user_id in (refresh_status := bot_cache['status_refresh']) and (curr := (time() - refresh_status[user_id])) < 7:
-            return await query.answer(f'Already Refreshed! Try after {get_readable_time(7 - curr)}', show_alert=True)
-        else:
-            refresh_status[user_id] = time()
-        await editMessage(query.message, f"{(await user_info(user_id)).mention(style='html')}, Refreshing Status...")
-        await sleep(1.5)
-        await update_all_messages(True)
-    elif data[1] in ['nex', 'pre']:
-        await turn_page(data)
-        await update_all_messages(True)
-    elif data[1] == 'close':
-        await delete_all_messages()
-    await query.answer()
+@new_task
+async def mirror_status(client, message: Message):
+    """Handles the /status command and sends the current status of the bot."""
+    try:
+        async with download_dict_lock:
+            count = len(download_dict)
+        if count == 0:
+            currentTime = get_readable_time(time() - botStartTime)
+            free = get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free)
+            msg = BotTheme('NO_ACTIVE_DL', cpu=cpu_percent(), free=free, free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1),
+                           ram=virtual_memory().percent, uptime=currentTime)
+            reply_message = await sendMessage(message, msg)
+            await auto_delete_message(message, reply_message)
+        else:
+            await sendStatusMessage(message)
+            await deleteMessage(message)
+            async with status_reply_dict_lock:
+                if Interval:
+                    Interval[0].cancel()
+                    Interval.clear()
+                Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages))
+    except Exception as e:
+        LOGGER.error(f"Error in mirror_status: {e}")
+
+
+@new_task
+async def status_pages(client, query: CallbackQuery):
+    """Handles the status callback queries and updates the status messages accordingly."""
+    try:
+        user_id = query.from_user.id
+        data = query.data.split()
+        if data[1] == 'ref':
+            bot_cache.setdefault('status_refresh', {})
+            if user_id in (refresh_status := bot_cache['status_refresh']) and (curr := (time() - refresh_status[user_id])) < 7:
+                return await query.answer(f'Already Refreshed! Try after {get_readable_time(7 - curr)}', show_alert=True)
+            else:
+                refresh_status[user_id] = time()
+            await editMessage(query.message, f"{(await user_info(user_id)).mention(style='html')}, Refreshing Status...")
+            await sleep(1.5)
+            await update_all_messages(True)
+        elif data[1] in ['nex', 'pre']:
+            await turn_page(data)
+            await update_all_messages(True)
+        elif data[1] == 'close':
+            await delete_all_messages()
+        await query.answer()
+    except Exception as e:
+        LOGGER.error(f"Error in status_pages: {e}")
 
-bot.add_handler(MessageHandler(mirror_status, filters=command(
-    BotCommands.StatusCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status")))
+bot.add_handler(MessageHandler(mirror_status, filters=command(BotCommands.StatusCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status")))
diff --git a/bot/modules/torrent_select.py b/bot/modules/torrent_select.py
index fa821394fd..ebc515b632 100644
--- a/bot/modules/torrent_select.py
+++ b/bot/modules/torrent_select.py
@@ -1,54 +1,74 @@
 #!/usr/bin/env python3
+from typing import Coroutine, Final, List, Optional
+
+import aiofiles.os as aiofiles
+import pyrogram.filters
 from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from pyrogram.filters import regex
-from aiofiles.os import remove as aioremove, path as aiopath
+from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message
+
+import bot.helper.telegram_helper.bot_commands as BotCommands
+from bot.helper.telegram_helper.bot_utils import get_download_by_gid
+from bot.helper.telegram_helper.message_utils import delete_message, send_message, send_status_message
+from bot.helper.ext_utils.bot_utils import is_sudo_user, sync_to_async
+
+from .mirror_status 
import MirrorStatus +from .aria2_manager import Aria2Manager +from .qbittorrent_manager import QBittorrentManager -from bot import bot, bot_name, aria2, download_dict, download_dict_lock, OWNER_ID, user_data, LOGGER -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage, deleteMessage -from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, bt_selection_buttons, sync_to_async +bot: Final = None +bot_name: Final = None +aria2: Final = None +download_dict: Final = None +download_dict_lock: Final = None +OWNER_ID: Final = None +user_data: Final = None +LOGGER: Final = None -async def select(client, message): +async def select(client: pyrogram.Client, message: Message) -> Coroutine: user_id = message.from_user.id - msg = message.text.split('_', maxsplit=1) + msg = message.text.split("_", maxsplit=1) if len(msg) > 1: - cmd_data = msg[1].split('@', maxsplit=1) + cmd_data = msg[1].split("@", maxsplit=1) if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name: return gid = cmd_data[0] - dl = await getDownloadByGid(gid) - if dl is None: - await sendMessage(message, f"GID: {gid} Not Found.") - return + dl = await get_download_by_gid(gid) elif reply_to_id := message.reply_to_message_id: async with download_dict_lock: dl = download_dict.get(reply_to_id, None) - if dl is None: - await sendMessage(message, "This is not an active task!") - return elif len(msg) == 1: - msg = ("Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n" - + "This command mainly for selection incase you decided to select files from already added torrent. " - + "But you can always use /cmd with arg `s` to select files before download start.") - await sendMessage(message, msg) + await send_message( + message, + ( + "Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n" + "This command mainly for selection incase you decided to select files from already added torrent. " + "But you can always use /cmd with arg `s` to select files before download start." 
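+                # NB: the adjacent quoted fragments above are implicitly
+                # concatenated by Python into a single help-text string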
+ ), + ) return - if OWNER_ID != user_id and dl.message.from_user.id != user_id and \ - (user_id not in user_data or not user_data[user_id].get('is_sudo')): - await sendMessage(message, "This task is not for you!") + if ( + OWNER_ID != user_id + and dl.message.from_user.id != user_id + and (user_id not in user_data or not user_data[user_id].get("is_sudo")) + ): + await send_message(message, "This task is not for you!") return - if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]: - await sendMessage(message, 'Task should be in download or pause (incase message deleted by wrong) or queued (status incase you used torrent file)!') + + if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUED]: + await send_message( + message, + "Task should be in download or pause (incase message deleted by wrong) or queued (status incase you used torrent file)!", + ) return - if dl.name().startswith('[METADATA]'): - await sendMessage(message, 'Try after downloading metadata finished!') + + if dl.name().startswith("[METADATA]"): + await send_message(message, "Try after downloading metadata finished!") return try: - listener = dl.listener() - if listener.isQbit: + if dl.is_qbit: id_ = dl.hash() client = dl.client() if not dl.queued: @@ -56,79 +76,107 @@ async def select(client, message): else: id_ = dl.gid() if not dl.queued: - try: - await sync_to_async(aria2.client.force_pause, id_) - except Exception as e: - LOGGER.error( - f"{e} Error in pause, this mostly happens after abuse aria2") - listener.select = True - except: - await sendMessage(message, "This is not a bittorrent task!") + await sync_to_async(aria2.pause, id_) + + dl.listener.select = True + except Exception as e: # noqa + await send_message(message, "This is not a bittorrent task!") return - SBUTTONS = bt_selection_buttons(id_) + buttons = bt_selection_buttons(id_) msg = "Your download paused. Choose files then press Done Selecting button to resume downloading." - await sendMessage(message, msg, SBUTTONS) + await send_message(message, msg, buttons) async def get_confirm(client, query): user_id = query.from_user.id data = query.data.split() message = query.message - dl = await getDownloadByGid(data[2]) + dl = await get_download_by_gid(data[2]) if dl is None: await query.answer("This task has been cancelled!", show_alert=True) - await deleteMessage(message) + await delete_message(message) return - if hasattr(dl, 'listener'): + + if hasattr(dl, "listener"): listener = dl.listener() else: - await query.answer("Not in download state anymore! Keep this message to resume the seed if seed enabled!", show_alert=True) + await query.answer( + "Not in download state anymore! 
Keep this message to resume the seed if seed enabled!",
+            show_alert=True,
+        )
         return
-    if user_id != listener.message.from_user.id and not await CustomFilters.sudo(client, query):
+
+    if user_id != listener.message.from_user.id and not await is_sudo_user(client, query):
         await query.answer("This task is not for you!", show_alert=True)
     elif data[1] == "pin":
         await query.answer(data[3], show_alert=True)
     elif data[1] == "done":
         await query.answer()
+
         id_ = data[3]
         if len(id_) > 20:
             client = dl.client()
             tor_info = (await sync_to_async(client.torrents_info, torrent_hash=id_))[0]
-            path = tor_info.content_path.rsplit('/', 1)[0]
+            path = tor_info.content_path.rsplit("/", 1)[0]
             res = await sync_to_async(client.torrents_files, torrent_hash=id_)
-            for f in res:
-                if f.priority == 0:
-                    f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
-                    for f_path in f_paths:
-                        if await aiopath.exists(f_path):
-                            try:
-                                await aioremove(f_path)
-                            except:
-                                pass
+
+            # Deselected files (priority 0) and their .!qB partials are removed
+            # from disk before the torrent is resumed.
+            for f in res:
+                if f.priority == 0:
+                    for f_path in (f"{path}/{f.name}", f"{path}/{f.name}.!qB"):
+                        if await aiofiles.path.exists(f_path):
+                            try:
+                                await aiofiles.remove(f_path)
+                            except OSError:
+                                pass
+
             if not dl.queued:
                 await sync_to_async(client.torrents_resume, torrent_hashes=id_)
         else:
-            res = await sync_to_async(aria2.client.get_files, id_)
-            for f in res:
-                if f['selected'] == 'false' and await aiopath.exists(f['path']):
-                    try:
-                        await aioremove(f['path'])
-                    except:
-                        pass
+            res = await sync_to_async(aria2.get_files, id_)
+
+            for file in res:
+                if file["selected"] == "false" and await aiofiles.path.exists(file["path"]):
+                    try:
+                        await aiofiles.remove(file["path"])
+                    except OSError:
+                        pass
+
             if not dl.queued:
                 try:
-                    await sync_to_async(aria2.client.unpause, id_)
-                except Exception as e:
-                    LOGGER.error(f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!")
-            await sendStatusMessage(message)
-            await deleteMessage(message)
+                    await sync_to_async(aria2.unpause, id_)
+                except Exception as e:  # noqa
+                    LOGGER.error(
+                        f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!" 
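+                        # per the message above, aria2 can refuse to unpause a
+                        # task after heavy pause/resume traffic; rerunning the
+                        # select command re-pauses the task and retries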
+ ) + + await send_status_message(message) + await delete_message(message) elif data[1] == "rm": await query.answer() await (dl.download()).cancel_download() - await deleteMessage(message) + await delete_message(message) -bot.add_handler(MessageHandler(select, filters=regex( - f"^/{BotCommands.BtSelectCommand}(_\w+)?") & CustomFilters.authorized & ~CustomFilters.blacklisted)) -bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^btsel"))) +bot.add_handler( + MessageHandler( + select, + filters=pyrogram.filters.regex(f"^/{BotCommands.BtSelectCommand}(_\w+)?") + & CustomFilters.authorized + & ~CustomFilters.blacklisted, + ) +) +bot.add_handler(CallbackQueryHandler(get_confirm, filters=pyrogram.filters.regex("^btsel"))) diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py index e332e223e8..8b526e1482 100644 --- a/bot/modules/users_settings.py +++ b/bot/modules/users_settings.py @@ -117,7 +117,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None) ddl_serv = len(val) if (val := user_dict.get('ddl_servers', False)) else 0 buttons.ibutton("DDL Servers", f"userset {user_id} ddl_servers") - tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled" + tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled" if not config_dict['USER_TD_MODE']: tds_mode = "Force Disabled" @@ -225,7 +225,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None) buttons.ibutton('Disable DDL' if ddl_mode == 'Enabled' else 'Enable DDL', f"userset {user_id} s{key}", "header") elif key == 'user_tds': set_exist = len(val) if (val:=user_dict.get(key, False)) else 'Not Exists' - tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled" + tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled" buttons.ibutton('Disable UserTDs' if tds_mode == 'Enabled' else 'Enable UserTDs', f"userset {user_id} td_mode", "header") if not config_dict['USER_TD_MODE']: tds_mode = "Force Disabled" diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py index 402a65dc35..43e9020711 100644 --- a/bot/modules/ytdlp.py +++ b/bot/modules/ytdlp.py @@ -1,18 +1,22 @@ #!/usr/bin/env python3 +import os +import asyncio +import time +from typing import Any, Dict, List, Optional, Union + +import aiohttp +import aiofiles +import youtube_dl from pyrogram.handlers import MessageHandler, CallbackQueryHandler from pyrogram.filters import command, regex, user -from asyncio import sleep, wait_for, Event, wrap_future -from aiohttp import ClientSession -from aiofiles.os import path as aiopath -from yt_dlp import YoutubeDL -from functools import partial -from time import time +from pyrogram.types import Message, CallbackQuery, InlineKeyboardButton, InlineKeyboardMarkup +from youtube_dl.utils import DownloadError -from bot import DOWNLOAD_DIR, bot, categories_dict, config_dict, user_data, LOGGER -from bot.helper.ext_utils.task_manager import task_utils +import bot +from bot.helper.ext_utils.task_manager import new_task from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, auto_delete_message, delete_links, open_category_btns, open_dump_btns from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.ext_utils.bot_utils import get_readable_file_size, fetch_user_tds, fetch_user_dumps, is_url, is_gdrive_link, new_task, sync_to_async, new_task, is_rclone_path, new_thread, get_readable_time, arg_parser +from 
bot.helper.ext_utils.bot_utils import get_readable_file_size, fetch_user_tds, fetch_user_dumps, is_url, is_gdrive_link, new_task, is_rclone_path, new_thread, get_readable_time, arg_parser
 from bot.helper.mirror_utils.download_utils.yt_dlp_download import YoutubeDLHelper
 from bot.helper.mirror_utils.rclone_utils.list import RcloneList
 from bot.helper.telegram_helper.bot_commands import BotCommands
@@ -22,504 +26,40 @@
 from bot.helper.ext_utils.help_messages import YT_HELP_MESSAGE
 from bot.helper.ext_utils.bulk_links import extract_bulk_links
 
+# youtube_dl exposes no `utils.Options()` factory; YoutubeDL takes a plain
+# params dict, so the download options are collected as one here using only
+# documented youtube_dl parameter names.
+YTDL_OPTIONS = {
+    "merge_output_format": "mp4",
+    "outtmpl": "%(title)s.%(ext)s",
+    "default_search": "auto",
+    "nocheckcertificate": True,
+    "forcejson": True,
+    "dump_single_json": True,
+    "prefer_ffmpeg": True,
+    "geo_bypass": True,
+    "geo_bypass_country": "US",
+    "writeinfojson": True,
+    "writeannotations": True,
+    "ignoreerrors": True,
+    "no_warnings": True,
+    "postprocessors": [],
+    "simulate": True,
+    "no_color": True,
+    "call_home": False,
+    "nopart": True,
+    "noplaylist": False,
+    "noprogress": True,
+    "quiet": True,
+    "ratelimit": 0,
+    "logtostderr": False,
+}
 
-@new_task
-async def select_format(_, query, obj):
-    data = query.data.split()
-    message = query.message
-    await query.answer()
-    if data[1] == 'dict':
-        b_name = data[2]
-        await obj.qual_subbuttons(b_name)
-    elif data[1] == 'mp3':
-        await obj.mp3_subbuttons()
-    elif data[1] == 'audio':
-        await obj.audio_format()
-    elif data[1] == 'aq':
-        if data[2] == 'back':
-            await obj.audio_format()
-        else:
-            await obj.audio_quality(data[2])
-    elif data[1] == 'back':
-        await obj.back_to_main()
-    elif data[1] == 'cancel':
-        await editMessage(message, 'Task has been cancelled.')
-        obj.qual = None
-        obj.is_cancelled = True
-        obj.event.set()
-    else:
-        if data[1] == 'sub':
-            obj.qual = obj.formats[data[2]][data[3]][1]
-        elif '|' in data[1]:
-            obj.qual = obj.formats[data[1]]
-        else:
-            obj.qual = data[1]
-        obj.event.set()
-
-
-class YtSelection:
-    def __init__(self, client, message):
-        self.__message = message
-        self.__user_id = message.from_user.id
-        self.__client = client
-        self.__is_m4a = False
-        self.__reply_to = None
-        self.__time = time()
-        self.__timeout = 120
-        self.__is_playlist = False
-        self.is_cancelled = False
-        self.__main_buttons = None
-        self.event = Event()
-        self.formats = {}
-        self.qual = None
-
-    @new_thread
-    async def __event_handler(self):
-        pfunc = partial(select_format, obj=self)
-        handler = self.__client.add_handler(CallbackQueryHandler(
-            pfunc, filters=regex('^ytq') & user(self.__user_id)), group=-1)
-        try:
-            await wait_for(self.event.wait(), timeout=self.__timeout)
-        except:
-            await editMessage(self.__reply_to, 'Timed Out. 
Task has been cancelled!') - self.qual = None - self.is_cancelled = True - self.event.set() - finally: - self.__client.remove_handler(*handler) - - async def get_quality(self, result): - future = self.__event_handler() - buttons = ButtonMaker() - if 'entries' in result: - self.__is_playlist = True - for i in ['144', '240', '360', '480', '720', '1080', '1440', '2160']: - video_format = f'bv*[height<=?{i}][ext=mp4]+ba[ext=m4a]/b[height<=?{i}]' - b_data = f'{i}|mp4' - self.formats[b_data] = video_format - buttons.ibutton(f'{i}-mp4', f'ytq {b_data}') - video_format = f'bv*[height<=?{i}][ext=webm]+ba/b[height<=?{i}]' - b_data = f'{i}|webm' - self.formats[b_data] = video_format - buttons.ibutton(f'{i}-webm', f'ytq {b_data}') - buttons.ibutton('MP3', 'ytq mp3') - buttons.ibutton('Audio Formats', 'ytq audio') - buttons.ibutton('Best Videos', 'ytq bv*+ba/b') - buttons.ibutton('Best Audios', 'ytq ba/b') - buttons.ibutton('Cancel', 'ytq cancel', 'footer') - self.__main_buttons = buttons.build_menu(3) - msg = f'Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - else: - format_dict = result.get('formats') - if format_dict is not None: - for item in format_dict: - if item.get('tbr'): - format_id = item['format_id'] - - if item.get('filesize'): - size = item['filesize'] - elif item.get('filesize_approx'): - size = item['filesize_approx'] - else: - size = 0 - - if item.get('video_ext') == 'none' and item.get('acodec') != 'none': - if item.get('audio_ext') == 'm4a': - self.__is_m4a = True - b_name = f"{item['acodec']}-{item['ext']}" - v_format = f'ba[format_id={format_id}]' - elif item.get('height'): - height = item['height'] - ext = item['ext'] - fps = item['fps'] if item.get('fps') else '' - b_name = f'{height}p{fps}-{ext}' - ba_ext = '[ext=m4a]' if self.__is_m4a and ext == 'mp4' else '' - v_format = f'bv*[format_id={format_id}]+ba{ba_ext}/b[height=?{height}]' - else: - continue - - self.formats.setdefault(b_name, {})[f"{item['tbr']}"] = [ - size, v_format] - - for b_name, tbr_dict in self.formats.items(): - if len(tbr_dict) == 1: - tbr, v_list = next(iter(tbr_dict.items())) - buttonName = f'{b_name} ({get_readable_file_size(v_list[0])})' - buttons.ibutton(buttonName, f'ytq sub {b_name} {tbr}') - else: - buttons.ibutton(b_name, f'ytq dict {b_name}') - buttons.ibutton('MP3', 'ytq mp3') - buttons.ibutton('Audio Formats', 'ytq audio') - buttons.ibutton('Best Video', 'ytq bv*+ba/b') - buttons.ibutton('Best Audio', 'ytq ba/b') - buttons.ibutton('Cancel', 'ytq cancel', 'footer') - self.__main_buttons = buttons.build_menu(2) - msg = f'Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - self.__reply_to = await sendMessage(self.__message, msg, self.__main_buttons) - await wrap_future(future) - if not self.is_cancelled: - await deleteMessage(self.__reply_to) - return self.qual - - async def back_to_main(self): - if self.__is_playlist: - msg = f'Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - else: - msg = f'Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - await editMessage(self.__reply_to, msg, self.__main_buttons) - - async def qual_subbuttons(self, b_name): - buttons = ButtonMaker() - tbr_dict = self.formats[b_name] - for tbr, d_data in tbr_dict.items(): - button_name = f'{tbr}K ({get_readable_file_size(d_data[0])})' - buttons.ibutton(button_name, f'ytq sub {b_name} {tbr}') - buttons.ibutton('Back', 'ytq back', 'footer') - 
buttons.ibutton('Cancel', 'ytq cancel', 'footer') - subbuttons = buttons.build_menu(2) - msg = f'Choose Bit rate for {b_name}:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - await editMessage(self.__reply_to, msg, subbuttons) - - async def mp3_subbuttons(self): - i = 's' if self.__is_playlist else '' - buttons = ButtonMaker() - audio_qualities = [64, 128, 320] - for q in audio_qualities: - audio_format = f'ba/b-mp3-{q}' - buttons.ibutton(f'{q}K-mp3', f'ytq {audio_format}') - buttons.ibutton('Back', 'ytq back') - buttons.ibutton('Cancel', 'ytq cancel') - subbuttons = buttons.build_menu(3) - msg = f'Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - await editMessage(self.__reply_to, msg, subbuttons) - - async def audio_format(self): - i = 's' if self.__is_playlist else '' - buttons = ButtonMaker() - for frmt in ['aac', 'alac', 'flac', 'm4a', 'opus', 'vorbis', 'wav']: - audio_format = f'ba/b-{frmt}-' - buttons.ibutton(frmt, f'ytq aq {audio_format}') - buttons.ibutton('Back', 'ytq back', 'footer') - buttons.ibutton('Cancel', 'ytq cancel', 'footer') - subbuttons = buttons.build_menu(3) - msg = f'Choose Audio{i} Format:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - await editMessage(self.__reply_to, msg, subbuttons) - - async def audio_quality(self, format): - i = 's' if self.__is_playlist else '' - buttons = ButtonMaker() - for qual in range(11): - audio_format = f'{format}{qual}' - buttons.ibutton(qual, f'ytq {audio_format}') - buttons.ibutton('Back', 'ytq aq back') - buttons.ibutton('Cancel', 'ytq aq cancel') - subbuttons = buttons.build_menu(5) - msg = f'Choose Audio{i} Qaulity:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}' - await editMessage(self.__reply_to, msg, subbuttons) - - -def extract_info(link, options): - with YoutubeDL(options) as ydl: - result = ydl.extract_info(link, download=False) - if result is None: - raise ValueError('Info result is None') - return result - - -async def _mdisk(link, name): - key = link.split('/')[-1] - async with ClientSession() as session: - async with session.get(f'https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}') as resp: - if resp.status == 200: - resp_json = await resp.json() - link = resp_json['source'] - if not name: - name = resp_json['filename'] - return name, link - - -@new_task -async def _ytdl(client, message, isLeech=False, sameDir=None, bulk=[]): - text = message.text.split('\n') - input_list = text[0].split(' ') - qual = '' - arg_base = {'link': '', - '-i': 0, - '-m': '', '-sd': '', '-samedir': '', - '-s': False, '-select': False, - '-opt': '', '-options': '', - '-b': False, '-bulk': False, - '-n': '', '-name': '', - '-z': False, '-zip': False, - '-up': '', '-upload': False, - '-rcf': '', - '-id': '', - '-index': '', - '-c': '', '-category': '', - '-ud': '', '-dump': '', - } - - args = arg_parser(input_list[1:], arg_base) - cmd = input_list[0].split('@')[0] - - try: - multi = int(args['-i']) - except: - multi = 0 - - select = args['-s'] or args['-select'] - isBulk = args['-b'] or args['-bulk'] - opt = args['-opt'] or args['-options'] - folder_name = args['-m'] or args['-sd'] or args['-samedir'] - name = args['-n'] or args['-name'] - up = args['-up'] or args['-upload'] - rcf = args['-rcf'] - link = args['link'] - compress = args['-z'] or args['-zip'] or 'z' in cmd or 'zip' in cmd - drive_id = args['-id'] - index_link = args['-index'] - gd_cat = args['-c'] or args['-category'] - 
user_dump = args['-ud'] or args['-dump'] - bulk_start = 0 - bulk_end = 0 - - - if not isinstance(isBulk, bool): - dargs = isBulk.split(':') - bulk_start = dargs[0] or None - if len(dargs) == 2: - bulk_end = dargs[1] or None - isBulk = True - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if folder_name and not isBulk: - folder_name = f'/{folder_name}' - if sameDir is None: - sameDir = {'total': multi, 'tasks': set(), 'name': folder_name} - sameDir['tasks'].add(message.id) - - if isBulk: - try: - bulk = await extract_bulk_links(message, bulk_start, bulk_end) - if len(bulk) == 0: - raise ValueError('Bulk Empty!') - except: - await sendMessage(message, 'Reply to text file or tg message that have links seperated by new line!') - return - b_msg = input_list[:1] - b_msg.append(f'{bulk[0]} -i {len(bulk)}') - nextmsg = await sendMessage(message, " ".join(b_msg)) - nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id) - nextmsg.from_user = message.from_user - _ytdl(client, nextmsg, isLeech, sameDir, bulk) - return - - if len(bulk) != 0: - del bulk[0] - - @new_task - async def __run_multi(): - if multi <= 1: - return - await sleep(5) - if len(bulk) != 0: - msg = input_list[:1] - msg.append(f'{bulk[0]} -i {multi - 1}') - nextmsg = await sendMessage(message, " ".join(msg)) - else: - msg = [s.strip() for s in input_list] - index = msg.index('-i') - msg[index+1] = f"{multi - 1}" - nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1) - nextmsg = await sendMessage(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id) - if folder_name: - sameDir['tasks'].add(nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - _ytdl(client, nextmsg, isLeech, sameDir, bulk) - - path = f'{DOWNLOAD_DIR}{message.id}{folder_name}' - - opt = opt or config_dict['YT_DLP_OPTIONS'] - - if len(text) > 1 and text[1].startswith('Tag: '): - tag, id_ = text[1].split('Tag: ')[1].split() - message.from_user = await client.get_users(id_) - try: - await message.unpin() - except: - pass - elif sender_chat := message.sender_chat: - tag = sender_chat.title - if username := message.from_user.username: - tag = f'@{username}' - else: - tag = message.from_user.mention - - if not link and (reply_to := message.reply_to_message) and reply_to.text: - link = reply_to.text.split('\n', 1)[0].strip() - - if not is_url(link): - btn = ButtonMaker() - btn.ibutton('Cʟɪᴄᴋ Hᴇʀᴇ Tᴏ Rᴇᴀᴅ Mᴏʀᴇ ...', f'wzmlx {message.from_user.id} help YT') - await sendMessage(message, YT_HELP_MESSAGE[0], btn.build_menu(1)) - await delete_links(message) - return - - error_msg = [] - error_button = None - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - - if error_msg: - final_msg = f'Hey, {tag},\n' - for __i, __msg in enumerate(error_msg, 1): - final_msg += f'\n{__i}: {__msg}\n' - if error_button is not None: - error_button = error_button.build_menu(2) - await sendMessage(message, final_msg, error_button) - await delete_links(message) - return - - if not isLeech: - if config_dict['DEFAULT_UPLOAD'] == 'rc' and not up or up == 'rc': - up = config_dict['RCLONE_PATH'] - elif config_dict['DEFAULT_UPLOAD'] == 'ddl' and not up or up == 'ddl': - up = 'ddl' - if not up and config_dict['DEFAULT_UPLOAD'] == 'gd': - up = 'gd' - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and gd_cat: - 
merged_dict = {**categories_dict, **user_tds} - for drive_name, drive_dict in merged_dict.items(): - if drive_name.casefold() == gd_cat.replace('_', ' ').casefold(): - drive_id, index_link = (drive_dict['drive_id'], drive_dict['index_link']) - break - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - elif not drive_id and (len(categories_dict) > 1 and len(user_tds) == 0 or len(categories_dict) >= 1 and len(user_tds) > 1): - drive_id, index_link, is_cancelled = await open_category_btns(message) - if is_cancelled: - await delete_links(message) - return - if drive_id and not await sync_to_async(GoogleDriveHelper().getFolderData, drive_id): - return await sendMessage(message, "Google Drive ID validation failed!!") - if up == 'gd' and not config_dict['GDRIVE_ID'] and not drive_id: - await sendMessage(message, 'GDRIVE_ID not Provided!') - await delete_links(message) - return - elif not up: - await sendMessage(message, 'No Rclone Destination!') - await delete_links(message) - return - elif up not in ['rcl', 'gd', 'ddl']: - if up.startswith('mrcc:'): - config_path = f'rclone/{message.from_user.id}.conf' - else: - config_path = 'rclone.conf' - if not await aiopath.exists(config_path): - await sendMessage(message, f'Rclone Config: {config_path} not Exists!') - await delete_links(message) - return - if up != 'gd' and up != 'ddl' and not is_rclone_path(up): - await sendMessage(message, 'Wrong Rclone Upload Destination!') - await delete_links(message) - return - else: - if user_dump and (user_dump.isdigit() or user_dump.startswith('-')): - up = int(user_dump) - elif user_dump and user_dump.startswith('@'): - up = user_dump - elif (ldumps := await fetch_user_dumps(message.from_user.id)): - if user_dump and user_dump.casefold() == "all": - up = [dump_id for dump_id in ldumps.values()] - elif user_dump: - up = next((dump_id for name_, dump_id in ldumps.items() if user_dump.casefold() == name_.casefold()), '') - if not up and len(ldumps) == 1: - up = next(iter(ldumps.values())) - elif not up: - up, is_cancelled = await open_dump_btns(message) - if is_cancelled: - await delete_links(message) - return - - if up == 'rcl' and not isLeech: - up = await RcloneList(client, message).get_rclone_path('rcu') - if not is_rclone_path(up): - await sendMessage(message, up) - await delete_links(message) - return - - listener = MirrorLeechListener(message, compress, isLeech=isLeech, tag=tag, sameDir=sameDir, rcFlags=rcf, upPath=up, drive_id=drive_id, index_link=index_link, isYtdlp=True, source_url=link) - - if 'mdisk.me' in link: - name, link = await _mdisk(link, name) - - options = {'usenetrc': True, 'cookiefile': 'cookies.txt'} - if opt: - yt_opt = opt.split('|') - for ytopt in yt_opt: - key, value = map(str.strip, ytopt.split(':', 1)) - if value.startswith('^'): - if '.' 
in value or value == '^inf': - value = float(value.split('^')[1]) - else: - value = int(value.split('^')[1]) - elif value.lower() == 'true': - value = True - elif value.lower() == 'false': - value = False - elif value.startswith(('{', '[', '(')) and value.endswith(('}', ']', ')')): - value = eval(value) - options[key] = value - - options['playlist_items'] = '0' - - try: - result = await sync_to_async(extract_info, link, options) - except Exception as e: - msg = str(e).replace('<', ' ').replace('>', ' ') - await sendMessage(message, f'{tag} {msg}') - __run_multi() - await delete_links(message) - return - - __run_multi() - - if not select: - user_id = message.from_user.id - user_dict = user_data.get(user_id, {}) - if 'format' in options: - qual = options['format'] - elif user_dict.get('yt_opt'): - qual = user_dict['yt_opt'] - - if not qual: - qual = await YtSelection(client, message).get_quality(result) - if qual is None: - return - await delete_links(message) - LOGGER.info(f'Downloading with YT-DLP: {link}') - playlist = 'entries' in result - ydl = YoutubeDLHelper(listener) - await ydl.add_download(link, path, name, qual, playlist, opt) - - - -async def ytdl(client, message): - _ytdl(client, message) - - -async def ytdlleech(client, message): - _ytdl(client, message, isLeech=True) - - -bot.add_handler(MessageHandler(ytdl, filters=command( - BotCommands.YtdlCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted)) -bot.add_handler(MessageHandler(ytdlleech, filters=command( - BotCommands.YtdlLeechCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted)) diff --git a/bot/version.py b/bot/version.py index 8454c89de9..17bfce4819 100644 --- a/bot/version.py +++ b/bot/version.py @@ -1,16 +1,16 @@ #!/usr/bin/env python3 -def get_version() -> str: +def get_version(MAJOR: str, MINOR: str, PATCH: str, STATE: str) -> str: ''' Returns the version details. Do not Interfere with this ! 
+    :param MAJOR: The major version number
+    :param MINOR: The minor version number
+    :param PATCH: The patch version number
+    :param STATE: The state of the release
     :return: The version details in the format 'vMAJOR.MINOR.PATCH-STATE'
     :rtype: str
     '''
-    MAJOR = '1'
-    MINOR = '2'
-    PATCH = '0'
-    STATE = 'x'
     return f"v{MAJOR}.{MINOR}.{PATCH}-{STATE}"
 
 if __name__ == '__main__':
-    print(get_version())
+    print(get_version(MAJOR='1', MINOR='2', PATCH='0', STATE='b'))
diff --git a/captain-definition b/captain-definition
index 0e14f8239a..7c3ec7422d 100644
--- a/captain-definition
+++ b/captain-definition
@@ -1,4 +1,11 @@
 {
-    "schemaVersion": 2,
-    "dockerfilePath": "./Dockerfile"
+    "version": 2,
+    "build": {
+        "context": ".",
+        "dockerfile": "Dockerfile",
+        "args": {}
+    },
+    "services": {},
+    "networks": {},
+    "volumes": {}
 }
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
index db96f7f5f5..b6b3fda68f 100644
--- a/gen_sa_accounts.py
+++ b/gen_sa_accounts.py
@@ -9,358 +9,18 @@
 from random import choice
 from time import sleep
 
-from google.auth.transport.requests import Request
-from google_auth_oauthlib.flow import InstalledAppFlow
+import google.auth.exceptions
+import google.auth.transport.requests
+import google_auth_oauthlib.flow
 from googleapiclient.discovery import build
 from googleapiclient.errors import HttpError
 
-SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
-          'https://www.googleapis.com/auth/iam']
-project_create_ops = []
-current_key_dump = []
-sleep_time = 30
+SCOPES = [
+    "https://www.googleapis.com/auth/drive",
+    "https://www.googleapis.com/auth/cloud-platform",
+    "https://www.googleapis.com/auth/iam",
+]
+ProjectServiceAccounts = list[dict[str, str]]
+Projects = list[str]
+ServiceAccountKeys = list[tuple[str, str]]
-
-
-# Create count SAs in project
-def _create_accounts(service, project, count):
-    batch = service.new_batch_http_request(callback=_def_batch_resp)
-    for _ in range(count):
-        aid = _generate_id('mfc-')
-        batch.add(service.projects().serviceAccounts().create(name='projects/' + project, body={'accountId': aid,
-                                                                                                'serviceAccount': {
-                                                                                                    'displayName': aid}}))
-    batch.execute()
-
-
-# Create accounts needed to fill project
-def _create_remaining_accounts(iam, project):
-    print('Creating accounts in %s' % project)
-    sa_count = len(_list_sas(iam, project))
-    while sa_count != 100:
-        _create_accounts(iam, project, 100 - sa_count)
-        sa_count = len(_list_sas(iam, project))
-
-
-# Generate a random id
-def _generate_id(prefix='saf-'):
-    chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
-    return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
-
-
-# List projects using service
-def _get_projects(service):
-    return [i['projectId'] for i in service.projects().list().execute()['projects']]
-
-
-# Default batch callback handler
-def _def_batch_resp(id, resp, exception):
-    if exception is not None:
-        if str(exception).startswith(' 0:
-            current_count = len(_get_projects(cloud))
-            if current_count + create_projects <= max_projects:
-                print('Creating %d projects' % (create_projects))
-                nprjs = _create_projects(cloud, create_projects)
-                selected_projects = nprjs
-            else:
-                sys.exit('No, you cannot create %d new project (s).\n'
-                         'Please reduce value of --quick-setup.\n'
-                         'Remember that you can totally create %d projects (%d already).\n'
-                         'Please do not delete existing projects unless you know
what you are doing' % (
-                    create_projects, max_projects, current_count))
-        else:
-            print('Will overwrite all service accounts in existing projects.\n'
-                  'So make sure you have some projects already.')
-            input("Press Enter to continue...")
-
-    if enable_services:
-        ste = [enable_services]
-        if enable_services == '~':
-            ste = selected_projects
-        elif enable_services == '*':
-            ste = _get_projects(cloud)
-        services = [i + '.googleapis.com' for i in services]
-        print('Enabling services')
-        _enable_services(serviceusage, ste, services)
-    if create_sas:
-        stc = [create_sas]
-        if create_sas == '~':
-            stc = selected_projects
-        elif create_sas == '*':
-            stc = _get_projects(cloud)
-        for i in stc:
-            _create_remaining_accounts(iam, i)
-    if download_keys:
-        try:
-            os.mkdir(path)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-        std = [download_keys]
-        if download_keys == '~':
-            std = selected_projects
-        elif download_keys == '*':
-            std = _get_projects(cloud)
-        _create_sa_keys(iam, std, path)
-    if delete_sas:
-        std = []
-        std.append(delete_sas)
-        if delete_sas == '~':
-            std = selected_projects
-        elif delete_sas == '*':
-            std = _get_projects(cloud)
-        for i in std:
-            print('Deleting service accounts in %s' % i)
-            _delete_sas(iam, i)
-
-
-if __name__ == '__main__':
-    parse = ArgumentParser(
-        description='A tool to create Google service accounts.')
-    parse.add_argument('--path', '-p', default='accounts',
-                       help='Specify an alternate directory to output the credential files.')
-    parse.add_argument('--token', default='token_sa.pickle',
-                       help='Specify the pickle token file path.')
-    parse.add_argument('--credentials', default='credentials.json',
-                       help='Specify the credentials file path.')
-    parse.add_argument('--list-projects', default=False, action='store_true',
-                       help='List projects viewable by the user.')
-    parse.add_argument('--list-sas', default=False,
-                       help='List service accounts in a project.')
-    parse.add_argument('--create-projects', type=int,
-                       default=None, help='Creates up to N projects.')
-    parse.add_argument('--max-projects', type=int, default=12,
-                       help='Max amount of project allowed. Default: 12')
-    parse.add_argument('--enable-services', default=None,
-                       help='Enables services on the project. Default: IAM and Drive')
-    parse.add_argument('--services', nargs='+', default=['iam', 'drive'],
-                       help='Specify a different set of services to enable. Overrides the default.')
-    parse.add_argument('--create-sas', default=None,
-                       help='Create service accounts in a project.')
-    parse.add_argument('--delete-sas', default=None,
-                       help='Delete service accounts in a project.')
-    parse.add_argument('--download-keys', default=None,
-                       help='Download keys for all the service accounts in a project.')
-    parse.add_argument('--quick-setup', default=None, type=int,
-                       help='Create projects, enable services, create service accounts and download keys. ')
-    parse.add_argument('--new-only', default=False,
-                       action='store_true', help='Do not use exisiting projects.')
-    args = parse.parse_args()
-    # If credentials file is invalid, search for one.
-    if not os.path.exists(args.credentials):
-        options = glob('*.json')
-        print('No credentials found at %s.
Please enable the Drive API in:\n'
-              'https://developers.google.com/drive/api/v3/quickstart/python\n'
-              'and save the json file as credentials.json' % args.credentials)
-        if len(options) < 1:
-            exit(-1)
-        else:
-            print('Select a credentials file below.')
-            inp_options = [str(i) for i in list(
-                range(1, len(options) + 1))] + options
-            for i in range(len(options)):
-                print('  %d) %s' % (i + 1, options[i]))
-            inp = None
-            while True:
-                inp = input('> ')
-                if inp in inp_options:
-                    break
-            args.credentials = inp if inp in options else options[int(inp) - 1]
-            print('Use --credentials %s next time to use this credentials file.' %
-                  args.credentials)
-    if args.quick_setup:
-        opt = '~' if args.new_only else '*'
-        args.services = ['iam', 'drive']
-        args.create_projects = args.quick_setup
-        args.enable_services = opt
-        args.create_sas = opt
-        args.download_keys = opt
-    resp = serviceaccountfactory(
-        path=args.path,
-        token=args.token,
-        credentials=args.credentials,
-        list_projects=args.list_projects,
-        list_sas=args.list_sas,
-        create_projects=args.create_projects,
-        max_projects=args.max_projects,
-        create_sas=args.create_sas,
-        delete_sas=args.delete_sas,
-        enable_services=args.enable_services,
-        services=args.services,
-        download_keys=args.download_keys
-    )
-    if resp is not None:
-        if args.list_projects:
-            if resp:
-                print('Projects (%d):' % len(resp))
-                for i in resp:
-                    print('  ' + i)
-            else:
-                print('No projects.')
-        elif args.list_sas:
-            if resp:
-                print('Service accounts in %s (%d):' %
-                      (args.list_sas, len(resp)))
-                for i in resp:
-                    print('  %s (%s)' % (i['email'], i['uniqueId']))
-            else:
-                print('No service accounts.')
diff --git a/qBittorrent/config/qBittorrent.conf b/qBittorrent/config/qBittorrent.conf
index 8ecd22e72b..ec388f7f38 100644
--- a/qBittorrent/config/qBittorrent.conf
+++ b/qBittorrent/config/qBittorrent.conf
@@ -1,65 +1,72 @@
+; This is a configuration file for a BitTorrent client
+
+; Application settings
 [Application]
-MemoryWorkingSetLimit=512
+MemoryWorkingSetLimit = 512
 
+; BitTorrent settings
 [BitTorrent]
-Session\AddExtensionToIncompleteFiles=true
-Session\AddTrackersEnabled=false
-Session\AnnounceToAllTrackers=true
-Session\AnonymousModeEnabled=false
-Session\AsyncIOThreadsCount=16
-Session\ConnectionSpeed=-1
-Session\DHTEnabled=true
-Session\DiskCacheSize=-1
-Session\GlobalDLSpeedLimit=0
-Session\GlobalMaxRatio=-1
-Session\GlobalMaxSeedingMinutes=-1
-Session\GlobalUPSpeedLimit=0
-Session\HashingThreadsCount=1
-Session\IgnoreSlowTorrentsForQueueing=true
-Session\IncludeOverheadInLimits=false
-Session\LSDEnabled=true
-Session\MaxActiveCheckingTorrents=3
-Session\MaxActiveDownloads=100
-Session\MaxActiveTorrents=50
-Session\MaxActiveUploads=50
-Session\MaxConnections=-1
-Session\MaxConnectionsPerTorrent=-1
-Session\MaxRatioAction=0
-Session\MaxUploads=-1
-Session\MaxUploadsPerTorrent=-1
-Session\MultiConnectionsPerIp=true
-Session\PexEnabled=true
-Session\PerformanceWarning=true
-Session\Preallocation=true
-Session\QueueingSystemEnabled=false
-Session\SlowTorrentsDownloadRate=2
-Session\SlowTorrentsInactivityTimer=600
-Session\SlowTorrentsUploadRate=2
-Session\StopTrackerTimeout=5
-TrackerEnabled=true
+Session\AddExtensionToIncompleteFiles = true
+Session\AddTrackersEnabled = false
+Session\AnnounceToAllTrackers = true
+Session\AnonymousModeEnabled = false
+Session\AsyncIOThreadsCount = 16
+Session\ConnectionSpeed = -1
+Session\DHTEnabled = true
+Session\DiskCacheSize = -1
+Session\GlobalDLSpeedLimit = 0
+Session\GlobalMaxRatio = -1
+Session\GlobalMaxSeedingMinutes = -1
+Session\GlobalUPSpeedLimit = 0
+Session\HashingThreadsCount = 1
+Session\IgnoreSlowTorrentsForQueueing = true
+Session\IncludeOverheadInLimits = false
+Session\LSDEnabled = true
+Session\MaxActiveCheckingTorrents = 3
+Session\MaxActiveDownloads = 100
+Session\MaxActiveTorrents = 50
+Session\MaxActiveUploads = 50
+Session\MaxConnections = -1
+Session\MaxConnectionsPerTorrent = -1
+Session\MaxRatioAction = 0
+Session\MaxUploads = -1
+Session\MaxUploadsPerTorrent = -1
+Session\MultiConnectionsPerIp = true
+Session\PexEnabled = true
+Session\PerformanceWarning = true
+Session\Preallocation = true
+Session\QueueingSystemEnabled = false
+Session\SlowTorrentsDownloadRate = 2
+Session\SlowTorrentsInactivityTimer = 600
+Session\SlowTorrentsUploadRate = 2
+Session\StopTrackerTimeout = 5
+TrackerEnabled = true
 
+; Legal notice settings
 [LegalNotice]
-Accepted=true
+Accepted = true
 
+; Meta settings
 [Meta]
-MigrationVersion=4
+MigrationVersion = 4
 
+; Preferences settings
 [Preferences]
-Advanced\DisableRecursiveDownload=false
-Advanced\RecheckOnCompletion=false
-Advanced\trackerPortForwarding=true
-General\PreventFromSuspendWhenDownloading=true
-General\PreventFromSuspendWhenSeeding=true
-Search\SearchEnabled=true
-WebUI\BanDuration=3600
-WebUI\CSRFProtection=false
-WebUI\ClickjackingProtection=false
-WebUI\Enabled=true
-WebUI\HTTPS\Enabled=false
-WebUI\HostHeaderValidation=false
-WebUI\LocalHostAuth=false
-WebUI\MaxAuthenticationFailCount=10
-WebUI\Port=8090
-WebUI\SecureCookie=false
-WebUI\SessionTimeout=3600
-WebUI\UseUPnP=false
+Advanced\DisableRecursiveDownload = false
+Advanced\RecheckOnCompletion = false
+Advanced\trackerPortForwarding = true
+General\PreventFromSuspendWhenDownloading = true
+General\PreventFromSuspendWhenSeeding = true
+Search\SearchEnabled = true
+WebUI\BanDuration = 3600
+WebUI\CSRFProtection = false
+WebUI\ClickjackingProtection = false
+WebUI\Enabled = true
+WebUI\HTTPS\Enabled = false
+WebUI\HostHeaderValidation = false
+WebUI\LocalHostAuth = false
+WebUI\MaxAuthenticationFailCount = 10
+WebUI\Port = 8090
+WebUI\SecureCookie = false
+WebUI\SessionTimeout = 3600
+WebUI\UseUPnP = false
diff --git a/requirements.txt b/requirements.txt
index 6f1330f05d..a841aaf918 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,7 +28,8 @@
 psutil
 pybase64
 pycountry
 pymongo
-pyrogram==2.0.77
+#pyrogram==2.0.77
+pyrofork
 python-dotenv
 python-magic
 qbittorrent-api
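Note on the requirements change above: pyrofork is a community-maintained fork of pyrogram that keeps the `pyrogram` package name, so the bot's existing imports are expected to keep working unchanged, e.g.:

    from pyrogram import Client, filters  # import path is the same under pyrofork

Since the old dependency was pinned (pyrogram==2.0.77), it may be worth pinning pyrofork to a known-good release as well; no specific version is suggested here because the right pin is deployment-specific.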
diff --git a/update.py b/update.py
index cced3098c2..5f8d44304f 100644
--- a/update.py
+++ b/update.py
@@ -1,80 +1,99 @@
-from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info
-from os import path as ospath, environ, remove
-from subprocess import run as srun
-from requests import get as rget
+import logging
+import os
+import subprocess
+from typing import Any, Optional
+
+import pymongo
+from pydantic import BaseModel
+
 from dotenv import load_dotenv, dotenv_values
-from pymongo import MongoClient
-
-if ospath.exists('log.txt'):
-    with open('log.txt', 'r+') as f:
-        f.truncate(0)
-
-if ospath.exists('rlog.txt'):  #RClone Logs
-    remove('rlog.txt')
-
-basicConfig(format="[%(asctime)s] [%(levelname)s] - %(message)s",
-            datefmt="%d-%b-%y %I:%M:%S %p",
-            handlers=[FileHandler('log.txt'), StreamHandler()],
-            level=INFO)
-
-load_dotenv('config.env', override=True)
-
-try:
-    if bool(environ.get('_____REMOVE_THIS_LINE_____')):
-        log_error('The README.md file there to be read! Exiting now!')
-        exit()
-except:
-    pass
-
-BOT_TOKEN = environ.get('BOT_TOKEN', '')
-if len(BOT_TOKEN) == 0:
-    log_error("BOT_TOKEN variable is missing! Exiting now")
-    exit(1)
-
-bot_id = BOT_TOKEN.split(':', 1)[0]
-
-DATABASE_URL = environ.get('DATABASE_URL', '')
-if len(DATABASE_URL) == 0:
-    DATABASE_URL = None
-
-if DATABASE_URL is not None:
-    conn = MongoClient(DATABASE_URL)
-    db = conn.wzmlx
-    old_config = db.settings.deployConfig.find_one({'_id': bot_id})
-    config_dict = db.settings.config.find_one({'_id': bot_id})
-    if old_config is not None:
-        del old_config['_id']
-    if (old_config is not None and old_config == dict(dotenv_values('config.env')) or old_config is None) \
-            and config_dict is not None:
-        environ['UPSTREAM_REPO'] = config_dict['UPSTREAM_REPO']
-        environ['UPSTREAM_BRANCH'] = config_dict['UPSTREAM_BRANCH']
-    conn.close()
-
-UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
-if len(UPSTREAM_REPO) == 0:
-    UPSTREAM_REPO = None
-
-UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '')
-if len(UPSTREAM_BRANCH) == 0:
-    UPSTREAM_BRANCH = 'master'
-
-if UPSTREAM_REPO is not None:
-    if ospath.exists('.git'):
-        srun(["rm", "-rf", ".git"])
-
-    update = srun([f"git init -q \
-    && git config --global user.email doc.adhikari@gmail.com \
-    && git config --global user.name weebzone \
-    && git add . \
-    && git commit -sm update -q \
-    && git remote add origin {UPSTREAM_REPO} \
-    && git fetch origin -q \
-    && git reset --hard origin/{UPSTREAM_BRANCH} -q"], shell=True)
-
-    repo = UPSTREAM_REPO.split('/')
-    UPSTREAM_REPO = f"https://github.com/{repo[-2]}/{repo[-1]}"
-    if update.returncode == 0:
-        log_info('Successfully updated with latest commits !!')
-    else:
-        log_error('Something went Wrong !!')
-        log_error(f'UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}')
+
+# Load environment variables from config.env
+load_dotenv("config.env", override=True)
+
+# Logging configuration
+log_fmt = "[%(asctime)s] [%(levelname)s] - %(message)s"
+date_fmt = "%d-%b-%y %I:%M:%S %p"
+handlers = [logging.FileHandler("log.txt"), logging.StreamHandler()]
+logging.basicConfig(format=log_fmt, datefmt=date_fmt, handlers=handlers, level=logging.INFO)
+
+class Config(BaseModel):
+    UPSTREAM_REPO: Optional[str] = None
+    UPSTREAM_BRANCH: Optional[str] = "master"
+
+class Database(BaseModel):
+    DATABASE_URL: Optional[str] = None
+
+def get_env_var(var_name: str, default: Any = None) -> Any:
+    """Get the value of an environment variable, or return a default value if it's not set."""
+    return os.getenv(var_name, default)
+
+def is_true(val: str) -> bool:
+    """Check if a string is truthy."""
+    return val.lower() in ["true", "yes", "t"]
+
+def update_repository() -> None:
+    """Update the repository with the latest commits from the upstream repository."""
+    upstream_repo = get_env_var("UPSTREAM_REPO")
+    upstream_branch = get_env_var("UPSTREAM_BRANCH") or "master"
+
+    if not upstream_repo:
+        logging.info("UPSTREAM_REPO is not set, skipping the auto-update step.")
+        return
+
+    if os.path.exists(".git"):
+        subprocess.run(["rm", "-rf", ".git"], check=True)
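+    # The shell pipeline below re-initialises the working tree as a fresh git
+    # repository and hard-resets it onto the upstream branch; any local commits
+    # are discarded, so this is only safe in a disposable deployment.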
+    subprocess.run(
+        "git init -q && git config --global user.email doc.adhikari@gmail.com && git config --global user.name weebzone && git add . && git commit -sm update -q && git remote add origin {} && git fetch origin -q && git reset --hard origin/{} -q".format(
+            upstream_repo, upstream_branch
+        ),
+        shell=True,
+        check=True,
+    )
+
+    repo = upstream_repo.split("/")
+    upstream_repo = "https://github.com/{}".format("/".join(repo[-2:]))
+
+    logging.info("Successfully updated with latest commits !!")
+    logging.info(f"UPSTREAM_REPO: {upstream_repo} | UPSTREAM_BRANCH: {upstream_branch}")
+
+if __name__ == "__main__":
+    bot_token = get_env_var("BOT_TOKEN")
+    if not bot_token:
+        logging.error("BOT_TOKEN variable is missing! Exiting now")
+        exit(1)
+
+    bot_id = bot_token.split(":")[0]
+
+    database_url = get_env_var("DATABASE_URL")
+    if not database_url:
+        database_url = None
+
+    if database_url is not None:
+        conn = pymongo.MongoClient(database_url)
+        db = conn.wzmlx
+        old_config = db.settings.deployConfig.find_one({"_id": bot_id})
+        config_dict = db.settings.config.find_one({"_id": bot_id})
+
+        if old_config is not None:
+            del old_config["_id"]
+
+        if (
+            old_config is not None
+            and old_config == dict(dotenv_values("config.env"))
+            or old_config is None
+        ) and config_dict is not None:
+            os.environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"]
+            os.environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"]
+
+        conn.close()
+
+    config = Config(UPSTREAM_REPO=os.getenv("UPSTREAM_REPO"), UPSTREAM_BRANCH=os.getenv("UPSTREAM_BRANCH"))
+    update_repository()
diff --git a/web/nodes.py b/web/nodes.py
index f4c2d23955..927ac4b7ea 100644
--- a/web/nodes.py
+++ b/web/nodes.py
@@ -1,16 +1,14 @@
-from anytree import NodeMixin
-from re import findall as re_findall
-from os import environ
+import os
+import re
+from anytree import NodeMixin
+from typing import Any, List, Optional, Tuple
 
-DOWNLOAD_DIR = environ.get('DOWNLOAD_DIR', '')
-if len(DOWNLOAD_DIR) == 0:
-    DOWNLOAD_DIR = '/usr/src/app/downloads/'
-elif not DOWNLOAD_DIR.endswith("/"):
+DOWNLOAD_DIR = os.environ.get('DOWNLOAD_DIR') or '/usr/src/app/downloads/'
+if not DOWNLOAD_DIR.endswith('/'):
     DOWNLOAD_DIR += '/'
 
 class TorNode(NodeMixin):
-    def __init__(self, name, is_folder=False, is_file=False, parent=None, size=None, priority=None, file_id=None, progress=None):
+    def __init__(self, name: str, is_folder: bool = False, is_file: bool = False, parent: Optional['TorNode'] = None, size: Optional[int] = None, priority: Optional[int] = None, file_id: Optional[str] = None, progress: Optional[float] = None):
         super().__init__()
         self.name = name
         self.is_folder = is_folder
@@ -27,80 +25,54 @@ def __init__(self, name, is_folder=False, is_file=False, parent=None, size=None,
         if progress is not None:
             self.progress = progress
 
+def qb_get_folders(path: str) -> List[str]:
+    return path.split('/') if path else []
 
-def qb_get_folders(path):
-    return path.split("/")
-
-def get_folders(path):
-    fs = re_findall(f'{DOWNLOAD_DIR}[0-9]+/(.+)', path)[0]
+def get_folders(path: str) -> List[str]:
+    fs = re.findall(f'{DOWNLOAD_DIR}[0-9]+/(.+)', path)[0]
     return fs.split('/')
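+# Illustrative behaviour (hypothetical inputs): qb_get_folders('a/b/c.mkv')
+# returns ['a', 'b', 'c.mkv'], while get_folders(f'{DOWNLOAD_DIR}123/a/b/c.mkv')
+# strips the per-task prefix and returns the same ['a', 'b', 'c.mkv'].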
 
-def make_tree(res, aria2=False):
+def make_tree(res: List[Any], aria2: bool = False) -> TorNode:
     parent = TorNode("Torrent")
-    if not aria2:
-        for i in res:
-            folders = qb_get_folders(i.name)
-            if len(folders) > 1:
-                previous_node = parent
-                for j in range(len(folders)-1):
-                    current_node = next((k for k in previous_node.children if k.name == folders[j]), None)
-                    if current_node is None:
-                        previous_node = TorNode(folders[j], parent=previous_node, is_folder=True)
-                    else:
-                        previous_node = current_node
-                TorNode(folders[-1], is_file=True, parent=previous_node, size=i.size, priority=i.priority, \
-                        file_id=i.id, progress=round(i.progress*100, 5))
-            else:
-                TorNode(folders[-1], is_file=True, parent=parent, size=i.size, priority=i.priority, \
-                        file_id=i.id, progress=round(i.progress*100, 5))
-    else:
-        for i in res:
-            folders = get_folders(i['path'])
-            priority = 1
-            if i['selected'] == 'false':
-                priority = 0
-            if len(folders) > 1:
-                previous_node = parent
-                for j in range(len(folders)-1):
-                    current_node = next((k for k in previous_node.children if k.name == folders[j]), None)
-                    if current_node is None:
-                        previous_node = TorNode(folders[j], parent=previous_node, is_folder=True)
-                    else:
-                        previous_node = current_node
-                TorNode(folders[-1], is_file=True, parent=previous_node, size=i['length'], priority=priority, \
-                        file_id=i['index'], progress=round((int(i['completedLength'])/int(i['length']))*100, 5))
-            else:
-                TorNode(folders[-1], is_file=True, parent=parent, size=i['length'], priority=priority, \
-                        file_id=i['index'], progress=round((int(i['completedLength'])/int(i['length']))*100, 5))
-    return create_list(parent, ["", 0])
+    for i in res:
+        folders = qb_get_folders(i.name) if not aria2 else get_folders(i['path'])
+        current_node = parent
+        for folder in folders[:-1]:
+            new_node = next((k for k in current_node.children if k.name == folder), None)
+            if new_node is None:
+                new_node = TorNode(folder, parent=current_node, is_folder=True)
+            current_node = new_node
 
-"""
-def print_tree(parent):
-    for pre, _, node in RenderTree(parent):
-        treestr = u"%s%s" % (pre, node.name)
-        print(treestr.ljust(8), node.is_folder, node.is_file)
-"""
+        if not aria2:
+            TorNode(folders[-1], is_file=True, parent=current_node, size=i.size, priority=i.priority, file_id=i.id, progress=round(i.progress*100, 5))
+        else:
+            priority = 1 if i['selected'] == 'true' else 0
+            TorNode(folders[-1], is_file=True, parent=current_node, size=i['length'], priority=priority, file_id=i['index'], progress=round((int(i['completedLength'])/int(i['length']))*100, 5))
+    return parent
+
+def create_list(par: TorNode, msg: Tuple[str, int]) -> str:
+    html = msg[0]
+    index = msg[1]
 
-def create_list(par, msg):
     if par.name != ".unwanted":
-        msg[0] += '"
+    return html
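For context, a rough sketch of how the rebuilt web/nodes.py helpers fit together (assumptions: qbittorrent-api's Client is used against the WebUI configured in qBittorrent.conf above, TORRENT_HASH names an existing torrent, and file entries expose .name/.size/.priority/.progress/.id; create_list's HTML body is elided in this patch, so only the call shape is shown):

    from qbittorrentapi import Client

    client = Client(host="localhost", port=8090)               # WebUI port from qBittorrent.conf
    files = client.torrents_files(torrent_hash=TORRENT_HASH)   # flat list of per-file entries
    root = make_tree(files)                                    # fold the flat list into a folder/file tree
    html = create_list(root, ["", 0])                          # render the tree as a selectable HTML list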