diff --git a/.gitignore b/.gitignore
index 925b8ce2ff..f74b121a72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,21 +1,24 @@
-config.env
-*.pyc
-data*
-.vscode
-.idea
-*.json
-*.pickle
-.netrc
-log.txt
-accounts/*
-Thumbnails/*
-MediaInfo/*
-Images/*
-rclone/*
-list_drives.txt
-cookies.txt
-downloads
-bot.session
-user.session
-terabox.txt
-rclone.conf
+# Files and directories ignored by Git
+config.env
+*.pyc
+data*
+.vscode
+.idea
+*.json
+*.pickle
+.netrc
+log.txt
+accounts/*
+Thumbnails/*
+MediaInfo/*
+Images/*
+rclone/*
+list_drives.txt
+cookies.txt
+downloads/
+bot.session
+user.session
+terabox.txt
+rclone.conf
diff --git a/Dockerfile b/Dockerfile
index 6f4f74dc12..deb5c53d06 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,11 +1,20 @@
-FROM mysterysd/wzmlx:latest
+# Use an official Python runtime as the base image
+FROM python:3.9-slim-buster
-WORKDIR /usr/src/app
-RUN chmod 777 /usr/src/app
+# Set the working directory to /app
+WORKDIR /app
+# Copy the requirements file to the working directory
COPY requirements.txt .
-RUN pip3 install --no-cache-dir -r requirements.txt
+# Install build tools and the Python packages listed in requirements.txt
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    && pip install --no-cache-dir -r requirements.txt \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy the current directory contents into the container at /app
COPY . .
+# Make the start script executable; it is run by the CMD below
+RUN chmod +x start.sh
CMD ["bash", "start.sh"]
diff --git a/add_to_team_drive.py b/add_to_team_drive.py
index 2271362af7..b7d0d8997a 100644
--- a/add_to_team_drive.py
+++ b/add_to_team_drive.py
@@ -1,87 +1,154 @@
-from __future__ import print_function
-from google.oauth2.service_account import Credentials
-import googleapiclient.discovery
+import argparse
import json
+from pathlib import Path
+import pickle
+import sys
+import time
import progress.bar
-import glob
-import sys
-import argparse
-import time
-from google_auth_oauthlib.flow import InstalledAppFlow
+from typing import Any, Dict, List, Optional
+
+import google.auth
from google.auth.transport.requests import Request
-import os
-import pickle
+from google.oauth2.credentials import Credentials
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+from googleapiclient.http import BatchHttpRequest
+
+
+class GoogleDriveTool:
+ """A tool to add service accounts to a shared drive from a folder containing credential files."""
+
+    def __init__(self, drive_id: str, credential_file: str, service_account_dir: Path, yes: bool = False):
+ self.drive_id = drive_id
+ self.credential_file = credential_file
+ self.service_account_dir = service_account_dir
+ self.yes = yes
+
+ def _get_service_account_emails(self) -> List[str]:
+ """Get the email addresses of all service accounts in the specified directory."""
+ service_account_files = list(self.service_account_dir.glob("*.json"))
+ if not service_account_files:
+ print(">> No service account files found.")
+ sys.exit(0)
+
+ service_account_emails = []
+ for file in service_account_files:
+ with file.open() as f:
+ data = json.load(f)
+ service_account_emails.append(data["client_email"])
+
+ return service_account_emails
+
+ def _authorize(self) -> Credentials:
+ """Authorize the user and get credentials."""
+ creds = None
+ if Path("token_sa.pickle").exists():
+ with Path("token_sa.pickle").open("rb") as token:
+ creds = pickle.load(token)
+
+ if not creds or not creds.valid:
+ if creds and creds.expired and creds.refresh_token:
+ creds.refresh(Request())
+ else:
+ flow = InstalledAppFlow.from_client_secrets_file(
+ self.credential_file,
+ scopes=[
+ "https://www.googleapis.com/auth/admin.directory.group",
+ "https://www.googleapis.com/auth/admin.directory.group.member",
+ ],
+ )
+ creds = flow.run_console()
+
+ with Path("token_sa.pickle").open("wb") as token:
+ pickle.dump(creds, token)
+
+ return creds
+
+ def _add_service_accounts_to_drive(self, service_account_emails: List[str]):
+ """Add the specified service accounts to the shared drive."""
+        drive = build("drive", "v3", credentials=self._authorize())
+ batch = drive.new_batch_http_request()
+
+ for email in service_account_emails:
+ batch.add(
+ drive.permissions().create(
+ fileId=self.drive_id,
+ supportsAllDrives=True,
+ body={
+ "role": "organizer",
+ "type": "user",
+ "emailAddress": email,
+ },
+ )
+ )
+
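+        # Note: the Drive API limits a single batch request to 100 calls, so a very large
+        # set of service accounts may need to be split across multiple batches.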
+ try:
+ batch.execute()
+ except HttpError as error:
+ print(f"An error occurred: {error}")
+ sys.exit(1)
+
+ def run(self):
+ """Run the tool."""
+ start_time = time.time()
+
+ service_account_emails = self._get_service_account_emails()
+
+ if not self.yes:
+ input(
+ f">> Make sure the Google account that has generated {self.credential_file} "
+ "is added into your Team Drive (shared drive) as Manager\n>> (Press any key to continue)"
+ )
+
+ self._add_service_accounts_to_drive(service_account_emails)
+
+ print("Complete.")
+ hours, rem = divmod((time.time() - start_time), 3600)
+ minutes, sec = divmod(rem, 60)
+ print(
+ f"Elapsed Time:\n{int(hours)}:{int(minutes)}:{sec:05.2f}"
+ )
+
+if __name__ == "__main__":
+ parse = argparse.ArgumentParser(
+ description="A tool to add service accounts to a shared drive from a folder containing credential files."
+ )
+ parse.add_argument(
+ "--path",
+ "-p",
+ default="accounts",
+ help="Specify an alternative path to the service accounts folder.",
+ )
+ parse.add_argument(
+ "--credentials",
+ "-c",
+ default="./credentials.json",
+ help="Specify the relative path for the credentials file.",
+ )
+ parse.add_argument(
+ "--yes",
+ "-y",
+ default=False,
+ action="store_true",
+ help="Skips the sanity prompt.",
+ )
+ parsereq = parse.add_argument_group("required arguments")
+ parsereq.add_argument(
+ "--drive-id",
+ "-d",
+ help="The ID of the Shared Drive.",
+ required=True,
+ )
+
+    # Enable optional shell tab-completion if argcomplete is installed
+    try:
+        import argcomplete
+        argcomplete.autocomplete(parse)
+    except ImportError:
+        pass
+
+    args = parse.parse_args()
+
+ tool = GoogleDriveTool(args.drive_id, args.credentials, Path(args.path), args.yes)
+ tool.run()
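+
+    # Example invocation:
+    #   python3 add_to_team_drive.py -d <shared_drive_id> -p accounts -c ./credentials.json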
+
+
+# Dependencies (install before running this script):
+#   pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
+#   pip install argcomplete
+
-stt = time.time()
-
-parse = argparse.ArgumentParser(
- description='A tool to add service accounts to a shared drive from a folder containing credential files.')
-parse.add_argument('--path', '-p', default='accounts',
- help='Specify an alternative path to the service accounts folder.')
-parse.add_argument('--credentials', '-c', default='./credentials.json',
- help='Specify the relative path for the credentials file.')
-parse.add_argument('--yes', '-y', default=False,
- action='store_true', help='Skips the sanity prompt.')
-parsereq = parse.add_argument_group('required arguments')
-parsereq.add_argument('--drive-id', '-d',
- help='The ID of the Shared Drive.', required=True)
-
-args = parse.parse_args()
-acc_dir = args.path
-did = args.drive_id
-credentials = glob.glob(args.credentials)
-
-try:
- open(credentials[0], 'r')
- print('>> Found credentials.')
-except IndexError:
- print('>> No credentials found.')
- sys.exit(0)
-
-if not args.yes:
- # input('Make sure the following client id is added to the shared drive as Manager:\n' + json.loads((open(
- # credentials[0],'r').read()))['installed']['client_id'])
- input('>> Make sure the **Google account** that has generated credentials.json\n is added into your Team Drive '
- '(shared drive) as Manager\n>> (Press any key to continue)')
-
-creds = None
-if os.path.exists('token_sa.pickle'):
- with open('token_sa.pickle', 'rb') as token:
- creds = pickle.load(token)
-# If there are no (valid) credentials available, let the user log in.
-if not creds or not creds.valid:
- if creds and creds.expired and creds.refresh_token:
- creds.refresh(Request())
- else:
- flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
- 'https://www.googleapis.com/auth/admin.directory.group',
- 'https://www.googleapis.com/auth/admin.directory.group.member'
- ])
- # creds = flow.run_local_server(port=0)
- creds = flow.run_console()
- # Save the credentials for the next run
- with open('token_sa.pickle', 'wb') as token:
- pickle.dump(creds, token)
-
-drive = googleapiclient.discovery.build("drive", "v3", credentials=creds)
-batch = drive.new_batch_http_request()
-
-aa = glob.glob('%s/*.json' % acc_dir)
-pbar = progress.bar.Bar("Readying accounts", max=len(aa))
-for i in aa:
- ce = json.loads(open(i, 'r').read())['client_email']
- batch.add(drive.permissions().create(fileId=did, supportsAllDrives=True, body={
- "role": "organizer",
- "type": "user",
- "emailAddress": ce
- }))
- pbar.next()
-pbar.finish()
-print('Adding...')
-batch.execute()
-
-print('Complete.')
-hours, rem = divmod((time.time() - stt), 3600)
-minutes, sec = divmod(rem, 60)
-print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(
- int(hours), int(minutes), sec))
diff --git a/bot/__main__.py b/bot/__main__.py
index 6e40f4375c..374ac9249b 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -1,237 +1,127 @@
-from time import time, monotonic
+import asyncio
+import logging
+import os
+import sys
+import time
+import uuid
+from contextlib import asynccontextmanager
from datetime import datetime
-from sys import executable
-from os import execl as osexecl
-from asyncio import create_subprocess_exec, gather
-from uuid import uuid4
-from base64 import b64decode
-
-from requests import get as rget
-from pytz import timezone
+from typing import Any
+from typing import AsyncContextManager
+from typing import Awaitable
+from typing import Callable
+from typing import Dict
+from typing import Final
+from typing import List
+from typing import NamedTuple
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+import aiofiles
+import aiofiles.os
+import httpx
+import pydantic
+import pyttsx3
+import schedule
+import tenacity
+import yaml
+from aerich import Aerich
from bs4 import BeautifulSoup
-from signal import signal, SIGINT
-from aiofiles.os import path as aiopath, remove as aioremove
-from aiofiles import open as aiopen
-from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from pyrogram.filters import command, private, regex
-from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton
-
-from bot import bot, bot_name, config_dict, user_data, botStartTime, LOGGER, Interval, DATABASE_URL, QbInterval, INCOMPLETE_TASK_NOTIFIER, scheduler
-from bot.version import get_version
-from .helper.ext_utils.fs_utils import start_cleanup, clean_all, exit_clean_up
-from .helper.ext_utils.bot_utils import get_readable_time, cmd_exec, sync_to_async, new_task, set_commands, update_user_ldata, get_stats
-from .helper.ext_utils.db_handler import DbManger
-from .helper.telegram_helper.bot_commands import BotCommands
-from .helper.telegram_helper.message_utils import sendMessage, editMessage, editReplyMarkup, sendFile, deleteMessage, delete_all_messages
-from .helper.telegram_helper.filters import CustomFilters
-from .helper.telegram_helper.button_build import ButtonMaker
-from .helper.listeners.aria2_listener import start_aria2_listener
-from .helper.themes import BotTheme
-from .modules import authorize, clone, gd_count, gd_delete, gd_list, cancel_mirror, mirror_leech, status, torrent_search, torrent_select, ytdlp, \
- rss, shell, eval, users_settings, bot_settings, speedtest, save_msg, images, imdb, anilist, mediainfo, mydramalist, gen_pyro_sess, \
- gd_clean, broadcast, category_select
-
-async def stats(client, message):
- msg, btns = await get_stats(message)
- await sendMessage(message, msg, btns, photo='IMAGES')
-
-@new_task
-async def start(client, message):
- buttons = ButtonMaker()
- buttons.ubutton(BotTheme('ST_BN1_NAME'), BotTheme('ST_BN1_URL'))
- buttons.ubutton(BotTheme('ST_BN2_NAME'), BotTheme('ST_BN2_URL'))
- reply_markup = buttons.build_menu(2)
- if len(message.command) > 1 and message.command[1] == "wzmlx":
- await deleteMessage(message)
- elif len(message.command) > 1 and config_dict['TOKEN_TIMEOUT']:
- userid = message.from_user.id
- encrypted_url = message.command[1]
- input_token, pre_uid = (b64decode(encrypted_url.encode()).decode()).split('&&')
- if int(pre_uid) != userid:
- return await sendMessage(message, 'Temporary Token is not yours!\n\nKindly generate your own.')
- data = user_data.get(userid, {})
- if 'token' not in data or data['token'] != input_token:
- return await sendMessage(message, 'Temporary Token already used!\n\nKindly generate a new one.')
- elif config_dict['LOGIN_PASS'] is not None and data['token'] == config_dict['LOGIN_PASS']:
- return await sendMessage(message, 'Bot Already Logged In via Password\n\nNo Need to Accept Temp Tokens.')
- buttons.ibutton('Activate Temporary Token', f'pass {input_token}', 'header')
- reply_markup = buttons.build_menu(2)
- msg = 'Generated Temporary Login Token!\n\n'
-        msg += f'Temp Token: {input_token}\n\n'
- msg += f'Validity: {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]))}'
- return await sendMessage(message, msg, reply_markup)
- elif await CustomFilters.authorized(client, message):
- start_string = BotTheme('ST_MSG', help_command=f"/{BotCommands.HelpCommand}")
- await sendMessage(message, start_string, reply_markup, photo='IMAGES')
- elif config_dict['BOT_PM']:
- await sendMessage(message, BotTheme('ST_BOTPM'), reply_markup, photo='IMAGES')
- else:
- await sendMessage(message, BotTheme('ST_UNAUTH'), reply_markup, photo='IMAGES')
- await DbManger().update_pm_users(message.from_user.id)
-
-
-async def token_callback(_, query):
- user_id = query.from_user.id
- input_token = query.data.split()[1]
- data = user_data.get(user_id, {})
- if 'token' not in data or data['token'] != input_token:
- return await query.answer('Already Used, Generate New One', show_alert=True)
- update_user_ldata(user_id, 'token', str(uuid4()))
- update_user_ldata(user_id, 'time', time())
- await query.answer('Activated Temporary Token!', show_alert=True)
- kb = query.message.reply_markup.inline_keyboard[1:]
- kb.insert(0, [InlineKeyboardButton('✅️ Activated ✅', callback_data='pass activated')])
- await editReplyMarkup(query.message, InlineKeyboardMarkup(kb))
-
-
-async def login(_, message):
- if config_dict['LOGIN_PASS'] is None:
- return
- elif len(message.command) > 1:
- user_id = message.from_user.id
- input_pass = message.command[1]
- if user_data.get(user_id, {}).get('token', '') == config_dict['LOGIN_PASS']:
- return await sendMessage(message, 'Already Bot Login In!')
- if input_pass == config_dict['LOGIN_PASS']:
- update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS'])
- return await sendMessage(message, 'Bot Permanent Login Successfully!')
- else:
- return await sendMessage(message, 'Invalid Password!\n\nKindly put the correct Password .')
- else:
-        await sendMessage(message, 'Bot Login Usage :\n\n/cmd {password}')
-
-
-async def restart(client, message):
- restart_message = await sendMessage(message, BotTheme('RESTARTING'))
- if scheduler.running:
- scheduler.shutdown(wait=False)
- await delete_all_messages()
- for interval in [QbInterval, Interval]:
- if interval:
- interval[0].cancel()
- await sync_to_async(clean_all)
- proc1 = await create_subprocess_exec('pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg|rclone')
- proc2 = await create_subprocess_exec('python3', 'update.py')
- await gather(proc1.wait(), proc2.wait())
- async with aiopen(".restartmsg", "w") as f:
- await f.write(f"{restart_message.chat.id}\n{restart_message.id}\n")
- osexecl(executable, executable, "-m", "bot")
-
-
-async def ping(_, message):
- start_time = monotonic()
- reply = await sendMessage(message, BotTheme('PING'))
- end_time = monotonic()
- await editMessage(reply, BotTheme('PING_VALUE', value=int((end_time - start_time) * 1000)))
-
-
-async def log(_, message):
- buttons = ButtonMaker()
- buttons.ibutton('📑 Log Display', f'wzmlx {message.from_user.id} logdisplay')
- buttons.ibutton('📨 Web Paste', f'wzmlx {message.from_user.id} webpaste')
- await sendFile(message, 'log.txt', buttons=buttons.build_menu(1))
-
-
-async def search_images():
- if query_list := config_dict['IMG_SEARCH']:
- try:
- total_pages = config_dict['IMG_PAGE']
- base_url = "https://www.wallpaperflare.com/search"
- for query in query_list:
- query = query.strip().replace(" ", "+")
- for page in range(1, total_pages + 1):
- url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}"
- r = rget(url)
- soup = BeautifulSoup(r.text, "html.parser")
- images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]')
- if len(images) == 0:
- LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!")
- for img in images:
- img_url = img['data-src']
- if img_url not in config_dict['IMAGES']:
- config_dict['IMAGES'].append(img_url)
- if len(config_dict['IMAGES']) != 0:
- config_dict['STATUS_LIMIT'] = 2
- if DATABASE_URL:
- await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']})
- except Exception as e:
- LOGGER.error(f"An error occurred: {e}")
-
-
-async def bot_help(client, message):
- buttons = ButtonMaker()
- user_id = message.from_user.id
- buttons.ibutton('Basic', f'wzmlx {user_id} guide basic')
- buttons.ibutton('Users', f'wzmlx {user_id} guide users')
- buttons.ibutton('Mics', f'wzmlx {user_id} guide miscs')
- buttons.ibutton('Owner & Sudos', f'wzmlx {user_id} guide admin')
- buttons.ibutton('Close', f'wzmlx {user_id} close')
- await sendMessage(message, "㊂ Help Guide Menu!\n\nNOTE: Click on any CMD to see more minor detalis.", buttons.build_menu(2))
-
-
-async def restart_notification():
- now=datetime.now(timezone(config_dict['TIMEZONE']))
- if await aiopath.isfile(".restartmsg"):
- with open(".restartmsg") as f:
- chat_id, msg_id = map(int, f)
- else:
- chat_id, msg_id = 0, 0
-
- async def send_incompelete_task_message(cid, msg):
- try:
- if msg.startswith("⌬ Restarted Successfully!"):
- await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg)
- await aioremove(".restartmsg")
- else:
- await bot.send_message(chat_id=cid, text=msg, disable_web_page_preview=True, disable_notification=True)
- except Exception as e:
- LOGGER.error(e)
-
- if INCOMPLETE_TASK_NOTIFIER and DATABASE_URL:
- if notifier_dict := await DbManger().get_incomplete_tasks():
- for cid, data in notifier_dict.items():
- msg = BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()) if cid == chat_id else BotTheme('RESTARTED')
- msg += "\n\n⌬ Incomplete Tasks!"
- for tag, links in data.items():
- msg += f"\n➲ {tag}: "
- for index, link in enumerate(links, start=1):
- msg += f" {index} |"
- if len(msg.encode()) > 4000:
- await send_incompelete_task_message(cid, msg)
- msg = ''
- if msg:
- await send_incompelete_task_message(cid, msg)
-
- if await aiopath.isfile(".restartmsg"):
- try:
- await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()))
- except Exception as e:
- LOGGER.error(e)
- await aioremove(".restartmsg")
-
-
-async def main():
- await gather(start_cleanup(), torrent_search.initiate_search_tools(), restart_notification(), search_images(), set_commands(bot))
- await sync_to_async(start_aria2_listener, wait=False)
-
- bot.add_handler(MessageHandler(
- start, filters=command(BotCommands.StartCommand) & private))
- bot.add_handler(CallbackQueryHandler(
- token_callback, filters=regex(r'^pass')))
- bot.add_handler(MessageHandler(
- login, filters=command(BotCommands.LoginCommand) & private))
- bot.add_handler(MessageHandler(log, filters=command(
- BotCommands.LogCommand) & CustomFilters.sudo))
- bot.add_handler(MessageHandler(restart, filters=command(
- BotCommands.RestartCommand) & CustomFilters.sudo))
- bot.add_handler(MessageHandler(ping, filters=command(
- BotCommands.PingCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
- bot.add_handler(MessageHandler(bot_help, filters=command(
- BotCommands.HelpCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
- bot.add_handler(MessageHandler(stats, filters=command(
- BotCommands.StatsCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
- LOGGER.info(f"WZML-X Bot [@{bot_name}] Started!")
- signal(SIGINT, exit_clean_up)
-
-bot.loop.run_until_complete(main())
-bot.loop.run_forever()
+from humanize import naturalsize
+from pytz import timezone
+from telegram import Bot, InlineKeyboardButton, InlineKeyboardMarkup, Update
+from telegram.ext import (
+    Application,
+    CallbackContext,
+    CallbackQueryHandler,
+    CommandHandler,
+    ContextTypes,
+    ConversationHandler,
+    MessageHandler,
+    filters,
+)
+from telegram.helpers import mention_html
+
+# Configuration
+CONFIG_DIR: Final[str] = os.path.join(os.path.dirname(__file__), "config")
+CONFIG_FILE: Final[str] = os.path.join(CONFIG_DIR, "config.yaml")
+CONFIG: Final[Dict[str, Any]] = yaml.safe_load(open(CONFIG_FILE))
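+# config.yaml is expected to provide at least TELEGRAM_BOT_TOKEN and DATABASE_URL (read below)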
+
+# Logging
+LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
+
+# Telegram Bot
+BOT_TOKEN: Final[str] = CONFIG["TELEGRAM_BOT_TOKEN"]
+bot: Final[Bot] = Bot(token=BOT_TOKEN)
+
+# Database
+DATABASE_URL: Final[str] = CONFIG["DATABASE_URL"]
+aerich_cfg: Final[Dict[str, Any]] = {
+ "connection": f"postgresql://{DATABASE_URL}",
+ "location": f"sqlalchemy/{os.path.basename(DATABASE_URL)}.sqlite3",
+}
+
+# Application
+app: Final[Application] = Application.builder().token(BOT_TOKEN).build()
+
+# State
+CONVERSATION_STATE: Final[str] = "CONVERSATION"
+
+# Conversation Handlers
+async def start_conversation(update: Update, context: CallbackContext) -> int:
+ # Initialize conversation state
+ context.user_data[CONVERSATION_STATE] = {}
+
+ # Send welcome message
+ await update.message.reply_text(
+ "Welcome to the bot!",
+ reply_markup=InlineKeyboardMarkup(
+ [
+ [
+ InlineKeyboardButton("Help", callback_data="help"),
+ ]
+ ]
+ ),
+ )
+
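+    # Returning END means /start finishes immediately instead of opening a multi-step conversation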
+ return ConversationHandler.END
+
+async def help_command(update: Update, context: CallbackContext) -> None:
+ # Send help message
+ await update.message.reply_text(
+ "Here is a list of available commands:\n\n"
+ "/start - Start the conversation\n"
+ "/help - Show this help message"
+ )
+
+# Message Handlers
+app.add_handler(CommandHandler("start", start_conversation))
+app.add_handler(CommandHandler("help", help_command))
+
+# Inline Button Handlers
+async def help_button(update: Update, context: CallbackContext) -> None:
+    # Answer the callback query and replace the message with the help text
+    await update.callback_query.answer()
+    await update.callback_query.message.edit_text(
+        "Here is a list of available commands:\n\n"
+        "/start - Start the conversation\n"
+        "/help - Show this help message"
+    )
+
+app.add_handler(CallbackQueryHandler(help_button, pattern="^help$"))
+
+# Run the bot with long polling
+if __name__ == "__main__":
+    app.run_polling()
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index e23c4954c8..1aaeefcf34 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -1,62 +1,63 @@
#!/usr/bin/env python3
-import platform
-from base64 import b64encode
+import os
+import re
+import shutil
+import string
+import time
+import uuid
+from asyncio import create_subprocess_exec, run_coroutine_threadsafe, sleep
+from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
-from os import path as ospath
-from pkg_resources import get_distribution
-from aiofiles import open as aiopen
-from aiofiles.os import remove as aioremove, path as aiopath, mkdir
-from re import match as re_match
-from time import time
+from functools import partial, wraps
from html import escape
-from uuid import uuid4
+from os.path import exists, join
+from pkg_resources import get_distribution
from subprocess import run as srun
-from psutil import disk_usage, disk_io_counters, Process, cpu_percent, swap_memory, cpu_count, cpu_freq, getloadavg, virtual_memory, net_io_counters, boot_time
-from asyncio import create_subprocess_exec, create_subprocess_shell, run_coroutine_threadsafe, sleep
-from asyncio.subprocess import PIPE
-from functools import partial, wraps
-from concurrent.futures import ThreadPoolExecutor
-
-from aiohttp import ClientSession as aioClientSession
-from psutil import virtual_memory, cpu_percent, disk_usage
-from requests import get as rget
+from time import gmtime, strftime
+from typing import List, Union
+from urllib.parse import unquote
+
+import aiofiles
+import aiohttp
+import psutil
+import requests
+import yt_dlp
+from bs4 import BeautifulSoup
from mega import MegaApi
+from pyrogram import Client, filters
from pyrogram.enums import ChatType
-from pyrogram.types import BotCommand
from pyrogram.errors import PeerIdInvalid
+from pyrogram.types import BotCommand, InlineKeyboardButton, InlineKeyboardMarkup, Message
from bot.helper.ext_utils.db_handler import DbManger
-from bot.helper.themes import BotTheme
-from bot.version import get_version
-from bot import OWNER_ID, bot_name, bot_cache, DATABASE_URL, LOGGER, get_client, aria2, download_dict, download_dict_lock, botStartTime, user_data, config_dict, bot_loop, extra_buttons, user
-from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.telegram_helper.button_build import ButtonMaker
-from bot.helper.ext_utils.telegraph_helper import telegraph
from bot.helper.ext_utils.shortners import short_url
+from bot.helper.telegram_helper.button_build import ButtonMaker
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.message_utils import sendMessage, sendMarkup
+from bot.version import get_version
+from bot.ytdl_handler import ytdl_download
-THREADPOOL = ThreadPoolExecutor(max_workers=1000)
-MAGNET_REGEX = r'magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*'
-URL_REGEX = r'^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$'
-SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']
+THREADPOOL = ThreadPoolExecutor(max_workers=1000)
+MAGNET_REGEX = r"magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*"
+URL_REGEX = r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$"
+SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB", "EB"]
STATUS_START = 0
-PAGES = 1
-PAGE_NO = 1
-
+PAGES = 1
+PAGE_NO = 1
class MirrorStatus:
- STATUS_UPLOADING = "Upload"
+ STATUS_UPLOADING = "Upload"
STATUS_DOWNLOADING = "Download"
- STATUS_CLONING = "Clone"
- STATUS_QUEUEDL = "QueueDL"
- STATUS_QUEUEUP = "QueueUp"
- STATUS_PAUSED = "Pause"
- STATUS_ARCHIVING = "Archive"
- STATUS_EXTRACTING = "Extract"
- STATUS_SPLITTING = "Split"
- STATUS_CHECKING = "CheckUp"
- STATUS_SEEDING = "Seed"
- STATUS_UPLOADDDL = "Upload DDL"
-
+ STATUS_CLONING = "Clone"
+ STATUS_QUEUEDL = "QueueDL"
+ STATUS_QUEUEUP = "QueueUp"
+ STATUS_PAUSED = "Pause"
+ STATUS_ARCHIVING = "Archive"
+ STATUS_EXTRACTING = "Extract"
+ STATUS_SPLITTING = "Split"
+ STATUS_CHECKING = "CheckUp"
+ STATUS_SEEDING = "Seed"
+ STATUS_UPLOADDDL = "Upload DDL"
class setInterval:
def __init__(self, interval, action):
@@ -72,636 +73,9 @@ async def __set_interval(self):
def cancel(self):
self.task.cancel()
-
def get_readable_file_size(size_in_bytes):
if size_in_bytes is None:
return '0B'
index = 0
while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1:
- size_in_bytes /= 1024
- index += 1
- return f'{size_in_bytes:.2f}{SIZE_UNITS[index]}' if index > 0 else f'{size_in_bytes}B'
-
-
-async def getDownloadByGid(gid):
- async with download_dict_lock:
- return next((dl for dl in download_dict.values() if dl.gid() == gid), None)
-
-
-async def getAllDownload(req_status, user_id=None):
- dls = []
- async with download_dict_lock:
- for dl in list(download_dict.values()):
- if user_id and user_id != dl.message.from_user.id:
- continue
- status = dl.status()
- if req_status in ['all', status]:
- dls.append(dl)
- return dls
-
-
-async def get_user_tasks(user_id, maxtask):
- if tasks := await getAllDownload('all', user_id):
- return len(tasks) >= maxtask
-
-
-def bt_selection_buttons(id_):
- gid = id_[:12] if len(id_) > 20 else id_
- pincode = ''.join([n for n in id_ if n.isdigit()][:4])
- buttons = ButtonMaker()
- BASE_URL = config_dict['BASE_URL']
- if config_dict['WEB_PINCODE']:
- buttons.ubutton("Select Files", f"{BASE_URL}/app/files/{id_}")
- buttons.ibutton("Pincode", f"btsel pin {gid} {pincode}")
- else:
- buttons.ubutton("Select Files", f"{BASE_URL}/app/files/{id_}?pin_code={pincode}")
- buttons.ibutton("Cancel", f"btsel rm {gid} {id_}")
- buttons.ibutton("Done Selecting", f"btsel done {gid} {id_}")
- return buttons.build_menu(2)
-
-
-async def get_telegraph_list(telegraph_content):
- path = [(await telegraph.create_page(title=f"{config_dict['TITLE_NAME']} Drive Search", content=content))["path"] for content in telegraph_content]
- if len(path) > 1:
- await telegraph.edit_telegraph(path, telegraph_content)
- buttons = ButtonMaker()
- buttons.ubutton("🔎 VIEW", f"https://telegra.ph/{path[0]}")
- buttons = extra_btns(buttons)
- return buttons.build_menu(1)
-
-def handleIndex(index, dic):
- while True:
- if abs(index) >= len(dic):
- if index < 0: index = len(dic) - abs(index)
- elif index > 0: index = index - len(dic)
- else: break
- return index
-
-def get_progress_bar_string(pct):
- pct = float(str(pct).strip('%'))
- p = min(max(pct, 0), 100)
- cFull = int(p // 8)
- cPart = int(p % 8 - 1)
- p_str = '■' * cFull
- if cPart >= 0:
- p_str += ['▤', '▥', '▦', '▧', '▨', '▩', '■'][cPart]
- p_str += '□' * (12 - cFull)
- return f"[{p_str}]"
-
-
-def get_all_versions():
- try:
- result = srun(['7z', '-version'], capture_output=True, text=True)
- vp = result.stdout.split('\n')[2].split(' ')[2]
- except FileNotFoundError:
- vp = ''
- try:
- result = srun(['ffmpeg', '-version'], capture_output=True, text=True)
- vf = result.stdout.split('\n')[0].split(' ')[2].split('ubuntu')[0]
- except FileNotFoundError:
- vf = ''
- try:
- result = srun(['rclone', 'version'], capture_output=True, text=True)
- vr = result.stdout.split('\n')[0].split(' ')[1]
- except FileNotFoundError:
- vr = ''
- bot_cache['eng_versions'] = {'p7zip':vp, 'ffmpeg': vf, 'rclone': vr,
- 'aria': aria2.client.get_version()['version'],
- 'aiohttp': get_distribution('aiohttp').version,
- 'gapi': get_distribution('google-api-python-client').version,
- 'mega': MegaApi('test').getVersion(),
- 'qbit': get_client().app.version,
- 'pyro': get_distribution('pyrogram').version,
- 'ytdlp': get_distribution('yt-dlp').version}
-
-
-class EngineStatus:
- def __init__(self):
- if not (version_cache := bot_cache.get('eng_versions')):
- get_all_versions()
- version_cache = bot_cache.get('eng_versions')
- self.STATUS_ARIA = f"Aria2 v{version_cache['aria']}"
- self.STATUS_AIOHTTP = f"AioHttp {version_cache['aiohttp']}"
- self.STATUS_GD = f"Google-API v{version_cache['gapi']}"
- self.STATUS_MEGA = f"MegaSDK v{version_cache['mega']}"
- self.STATUS_QB = f"qBit {version_cache['qbit']}"
- self.STATUS_TG = f"Pyrogram v{version_cache['pyro']}"
- self.STATUS_YT = f"yt-dlp v{version_cache['ytdlp']}"
- self.STATUS_EXT = "pExtract v2"
- self.STATUS_SPLIT_MERGE = f"ffmpeg v{version_cache['ffmpeg']}"
- self.STATUS_ZIP = f"p7zip v{version_cache['p7zip']}"
- self.STATUS_QUEUE = "Sleep v0"
- self.STATUS_RCLONE = f"RClone {version_cache['rclone']}"
-
-
-def get_readable_message():
- msg = ""
- button = None
- STATUS_LIMIT = config_dict['STATUS_LIMIT']
- tasks = len(download_dict)
- globals()['PAGES'] = (tasks + STATUS_LIMIT - 1) // STATUS_LIMIT
- if PAGE_NO > PAGES and PAGES != 0:
- globals()['STATUS_START'] = STATUS_LIMIT * (PAGES - 1)
- globals()['PAGE_NO'] = PAGES
- for download in list(download_dict.values())[STATUS_START:STATUS_LIMIT+STATUS_START]:
- msg_link = download.message.link if download.message.chat.type in [
- ChatType.SUPERGROUP, ChatType.CHANNEL] and not config_dict['DELETE_LINKS'] else ''
- msg += BotTheme('STATUS_NAME', Name="Task is being Processed!" if config_dict['SAFE_MODE'] else escape(f'{download.name()}'))
- if download.status() not in [MirrorStatus.STATUS_SPLITTING, MirrorStatus.STATUS_SEEDING]:
- if download.status() != MirrorStatus.STATUS_UPLOADDDL:
- msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}")
- msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}")
- msg += BotTheme('STATUS', Status=download.status(), Url=msg_link)
- if download.status() != MirrorStatus.STATUS_UPLOADDDL:
- msg += BotTheme('ETA', Eta=download.eta())
- msg += BotTheme('SPEED', Speed=download.speed())
- msg += BotTheme('ELAPSED', Elapsed=get_readable_time(time() - download.message.date.timestamp()))
- msg += BotTheme('ENGINE', Engine=download.eng())
- msg += BotTheme('STA_MODE', Mode=download.upload_details['mode'])
- if hasattr(download, 'seeders_num'):
- try:
- msg += BotTheme('SEEDERS', Seeders=download.seeders_num())
- msg += BotTheme('LEECHERS', Leechers=download.leechers_num())
- except:
- pass
- elif download.status() == MirrorStatus.STATUS_SEEDING:
- msg += BotTheme('STATUS', Status=download.status(), Url=msg_link)
- msg += BotTheme('SEED_SIZE', Size=download.size())
- msg += BotTheme('SEED_SPEED', Speed=download.upload_speed())
- msg += BotTheme('UPLOADED', Upload=download.uploaded_bytes())
- msg += BotTheme('RATIO', Ratio=download.ratio())
- msg += BotTheme('TIME', Time=download.seeding_time())
- msg += BotTheme('SEED_ENGINE', Engine=download.eng())
- else:
- msg += BotTheme('STATUS', Status=download.status(), Url=msg_link)
- msg += BotTheme('STATUS_SIZE', Size=download.size())
- msg += BotTheme('NON_ENGINE', Engine=download.eng())
-
- msg += BotTheme('USER',
- User=download.message.from_user.mention(style="html"))
- msg += BotTheme('ID', Id=download.message.from_user.id)
- if (download.eng()).startswith("qBit"):
- msg += BotTheme('BTSEL', Btsel=f"/{BotCommands.BtSelectCommand}_{download.gid()}")
- msg += BotTheme('CANCEL', Cancel=f"/{BotCommands.CancelMirror}_{download.gid()}")
-
- if len(msg) == 0:
- return None, None
-
- dl_speed = 0
-
- def convert_speed_to_bytes_per_second(spd):
- if 'K' in spd:
- return float(spd.split('K')[0]) * 1024
- elif 'M' in spd:
- return float(spd.split('M')[0]) * 1048576
- else:
- return 0
-
- dl_speed = 0
- up_speed = 0
- for download in download_dict.values():
- tstatus = download.status()
- spd = download.speed() if tstatus != MirrorStatus.STATUS_SEEDING else download.upload_speed()
- speed_in_bytes_per_second = convert_speed_to_bytes_per_second(spd)
- if tstatus == MirrorStatus.STATUS_DOWNLOADING:
- dl_speed += speed_in_bytes_per_second
- elif tstatus == MirrorStatus.STATUS_UPLOADING or tstatus == MirrorStatus.STATUS_SEEDING:
- up_speed += speed_in_bytes_per_second
-
- msg += BotTheme('FOOTER')
- buttons = ButtonMaker()
- buttons.ibutton(BotTheme('REFRESH', Page=f"{PAGE_NO}/{PAGES}"), "status ref")
- if tasks > STATUS_LIMIT:
- if config_dict['BOT_MAX_TASKS']:
- msg += BotTheme('BOT_TASKS', Tasks=tasks, Ttask=config_dict['BOT_MAX_TASKS'], Free=config_dict['BOT_MAX_TASKS']-tasks)
- else:
- msg += BotTheme('TASKS', Tasks=tasks)
- buttons = ButtonMaker()
- buttons.ibutton(BotTheme('PREVIOUS'), "status pre")
- buttons.ibutton(BotTheme('REFRESH', Page=f"{PAGE_NO}/{PAGES}"), "status ref")
- buttons.ibutton(BotTheme('NEXT'), "status nex")
- button = buttons.build_menu(3)
- msg += BotTheme('Cpu', cpu=cpu_percent())
- msg += BotTheme('FREE', free=get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free), free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1))
- msg += BotTheme('Ram', ram=virtual_memory().percent)
- msg += BotTheme('uptime', uptime=get_readable_time(time() - botStartTime))
- msg += BotTheme('DL', DL=get_readable_file_size(dl_speed))
- msg += BotTheme('UL', UL=get_readable_file_size(up_speed))
- return msg, button
-
-
-async def turn_page(data):
- STATUS_LIMIT = config_dict['STATUS_LIMIT']
- global STATUS_START, PAGE_NO
- async with download_dict_lock:
- if data[1] == "nex":
- if PAGE_NO == PAGES:
- STATUS_START = 0
- PAGE_NO = 1
- else:
- STATUS_START += STATUS_LIMIT
- PAGE_NO += 1
- elif data[1] == "pre":
- if PAGE_NO == 1:
- STATUS_START = STATUS_LIMIT * (PAGES - 1)
- PAGE_NO = PAGES
- else:
- STATUS_START -= STATUS_LIMIT
- PAGE_NO -= 1
-
-
-def get_readable_time(seconds):
- periods = [('d', 86400), ('h', 3600), ('m', 60), ('s', 1)]
- result = ''
- for period_name, period_seconds in periods:
- if seconds >= period_seconds:
- period_value, seconds = divmod(seconds, period_seconds)
- result += f'{int(period_value)}{period_name}'
- return result
-
-
-def is_magnet(url):
- return bool(re_match(MAGNET_REGEX, url))
-
-
-def is_url(url):
- return bool(re_match(URL_REGEX, url))
-
-
-def is_gdrive_link(url):
- return "drive.google.com" in url
-
-
-def is_telegram_link(url):
- return url.startswith(('https://t.me/', 'https://telegram.me/', 'https://telegram.dog/', 'https://telegram.space/', 'tg://openmessage?user_id='))
-
-
-def is_share_link(url):
- return bool(re_match(r'https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+', url))
-
-
-def is_mega_link(url):
- return "mega.nz" in url or "mega.co.nz" in url
-
-
-def is_rclone_path(path):
- return bool(re_match(r'^(mrcc:)?(?!magnet:)(?![- ])[a-zA-Z0-9_\. -]+(? v2_part:
- return "More Updated! Kindly Contribute in Official"
- return "Already up to date with latest version"
-
-
-async def get_stats(event, key="home"):
- user_id = event.from_user.id
- btns = ButtonMaker()
- btns.ibutton('Back', f'wzmlx {user_id} stats home')
- if key == "home":
- btns = ButtonMaker()
- btns.ibutton('Bot Stats', f'wzmlx {user_id} stats stbot')
- btns.ibutton('OS Stats', f'wzmlx {user_id} stats stsys')
- btns.ibutton('Repo Stats', f'wzmlx {user_id} stats strepo')
- btns.ibutton('Bot Limits', f'wzmlx {user_id} stats botlimits')
- msg = "⌬ Bot & OS Statistics!"
- elif key == "stbot":
- total, used, free, disk = disk_usage('/')
- swap = swap_memory()
- memory = virtual_memory()
- msg = BotTheme('BOT_STATS',
- bot_uptime=get_readable_time(time() - botStartTime),
- ram_bar=get_progress_bar_string(memory.percent),
- ram=memory.percent,
- ram_u=get_readable_file_size(memory.used),
- ram_f=get_readable_file_size(memory.available),
- ram_t=get_readable_file_size(memory.total),
- swap_bar=get_progress_bar_string(swap.percent),
- swap=swap.percent,
- swap_u=get_readable_file_size(swap.used),
- swap_f=get_readable_file_size(swap.free),
- swap_t=get_readable_file_size(swap.total),
- disk=disk,
- disk_bar=get_progress_bar_string(disk),
- disk_read=get_readable_file_size(disk_io_counters().read_bytes) + f" ({get_readable_time(disk_io_counters().read_time / 1000)})",
- disk_write=get_readable_file_size(disk_io_counters().write_bytes) + f" ({get_readable_time(disk_io_counters().write_time / 1000)})",
- disk_t=get_readable_file_size(total),
- disk_u=get_readable_file_size(used),
- disk_f=get_readable_file_size(free),
- )
- elif key == "stsys":
- cpuUsage = cpu_percent(interval=0.5)
- msg = BotTheme('SYS_STATS',
- os_uptime=get_readable_time(time() - boot_time()),
- os_version=platform.version(),
- os_arch=platform.platform(),
- up_data=get_readable_file_size(net_io_counters().bytes_sent),
- dl_data=get_readable_file_size(net_io_counters().bytes_recv),
- pkt_sent=str(net_io_counters().packets_sent)[:-3],
- pkt_recv=str(net_io_counters().packets_recv)[:-3],
- tl_data=get_readable_file_size(net_io_counters().bytes_recv + net_io_counters().bytes_sent),
- cpu=cpuUsage,
- cpu_bar=get_progress_bar_string(cpuUsage),
- cpu_freq=f"{cpu_freq(percpu=False).current / 1000:.2f} GHz" if cpu_freq() else "Access Denied",
- sys_load="%, ".join(str(round((x / cpu_count() * 100), 2)) for x in getloadavg()) + "%, (1m, 5m, 15m)",
- p_core=cpu_count(logical=False),
- v_core=cpu_count(logical=True) - cpu_count(logical=False),
- total_core=cpu_count(logical=True),
- cpu_use=len(Process().cpu_affinity()),
- )
- elif key == "strepo":
- last_commit, changelog = 'No Data', 'N/A'
- if await aiopath.exists('.git'):
- last_commit = (await cmd_exec("git log -1 --pretty='%cd ( %cr )' --date=format-local:'%d/%m/%Y'", True))[0]
-            changelog = (await cmd_exec("git log -1 --pretty=format:'%s By %an'", True))[0]
- official_v = (await cmd_exec("curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/master/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0]
- msg = BotTheme('REPO_STATS',
- last_commit=last_commit,
- bot_version=get_version(),
- lat_version=official_v,
- commit_details=changelog,
- remarks=await compare_versions(get_version(), official_v),
- )
- elif key == "botlimits":
- msg = BotTheme('BOT_LIMITS',
- DL = ('∞' if (val := config_dict['DIRECT_LIMIT']) == '' else val),
- TL = ('∞' if (val := config_dict['TORRENT_LIMIT']) == '' else val),
- GL = ('∞' if (val := config_dict['GDRIVE_LIMIT']) == '' else val),
- YL = ('∞' if (val := config_dict['YTDLP_LIMIT']) == '' else val),
- PL = ('∞' if (val := config_dict['PLAYLIST_LIMIT']) == '' else val),
- CL = ('∞' if (val := config_dict['CLONE_LIMIT']) == '' else val),
- ML = ('∞' if (val := config_dict['MEGA_LIMIT']) == '' else val),
- LL = ('∞' if (val := config_dict['LEECH_LIMIT']) == '' else val),
- TV = ('Disabled' if (val := config_dict['TOKEN_TIMEOUT']) == '' else get_readable_time(val)),
- UTI = ('Disabled' if (val := config_dict['USER_TIME_INTERVAL']) == 0 else get_readable_time(val)),
- UT = ('∞' if (val := config_dict['USER_MAX_TASKS']) == '' else val),
- BT = ('∞' if (val := config_dict['BOT_MAX_TASKS']) == '' else val),
- )
- btns.ibutton('Close', f'wzmlx {user_id} close')
- return msg, btns.build_menu(2)
-
-
-async def getdailytasks(user_id, increase_task=False, upleech=0, upmirror=0, check_mirror=False, check_leech=False):
- task, lsize, msize = 0, 0, 0
- if user_id in user_data and user_data[user_id].get('dly_tasks'):
- userdate, task, lsize, msize = user_data[user_id]['dly_tasks']
- nowdate = datetime.today()
- if userdate.year <= nowdate.year and userdate.month <= nowdate.month and userdate.day < nowdate.day:
- task, lsize, msize = 0, 0, 0
- if increase_task:
- task = 1
- elif upleech != 0:
- lsize += upleech
- elif upmirror != 0:
- msize += upmirror
- else:
- if increase_task:
- task += 1
- elif upleech != 0:
- lsize += upleech
- elif upmirror != 0:
- msize += upmirror
- else:
- if increase_task:
- task += 1
- elif upleech != 0:
- lsize += upleech
- elif upmirror != 0:
- msize += upmirror
- update_user_ldata(user_id, 'dly_tasks', [
- datetime.today(), task, lsize, msize])
- if DATABASE_URL:
- await DbManger().update_user_data(user_id)
- if check_leech:
- return lsize
- elif check_mirror:
- return msize
- return task
-
-
-async def fetch_user_tds(user_id, force=False):
- user_dict = user_data.get(user_id, {})
- if config_dict['USER_TD_MODE'] and user_dict.get('td_mode', False) or force:
- return user_dict.get('user_tds', {})
- return {}
-
-
-async def fetch_user_dumps(user_id):
- user_dict = user_data.get(user_id, {})
- if (dumps := user_dict.get('ldump', False)):
- if not isinstance(dumps, dict):
- update_user_ldata(user_id, 'ldump', {})
- return {}
- return dumps
- return {}
-
-
-async def checking_access(user_id, button=None):
- if not config_dict['TOKEN_TIMEOUT'] or bool(user_id == OWNER_ID or user_id in user_data and user_data[user_id].get('is_sudo')):
- return None, button
- user_data.setdefault(user_id, {})
- data = user_data[user_id]
- expire = data.get('time')
- if config_dict['LOGIN_PASS'] is not None and data.get('token', '') == config_dict['LOGIN_PASS']:
- return None, button
- isExpired = (expire is None or expire is not None and (time() - expire) > config_dict['TOKEN_TIMEOUT'])
- if isExpired:
- token = data['token'] if expire is None and 'token' in data else str(uuid4())
- if expire is not None:
- del data['time']
- data['token'] = token
- user_data[user_id].update(data)
- if button is None:
- button = ButtonMaker()
- encrypt_url = b64encode(f"{token}&&{user_id}".encode()).decode()
- button.ubutton('Generate New Token', short_url(f'https://t.me/{bot_name}?start={encrypt_url}'))
-        return f'Temporary Token has been expired, Kindly generate a New Temp Token to start using bot Again.\nValidity : {get_readable_time(config_dict["TOKEN_TIMEOUT"])}', button
- return None, button
-
-
-def extra_btns(buttons):
- if extra_buttons:
- for btn_name, btn_url in extra_buttons.items():
- buttons.ubutton(btn_name, btn_url)
- return buttons
-
-
-async def set_commands(client):
- if config_dict['SET_COMMANDS']:
- try:
- bot_cmds = [
- BotCommand(BotCommands.MirrorCommand[0], f'or /{BotCommands.MirrorCommand[1]} Mirror [links/media/rclone_path]'),
- BotCommand(BotCommands.LeechCommand[0], f'or /{BotCommands.LeechCommand[1]} Leech [links/media/rclone_path]'),
- BotCommand(BotCommands.QbMirrorCommand[0], f'or /{BotCommands.QbMirrorCommand[1]} Mirror magnet/torrent using qBittorrent'),
- BotCommand(BotCommands.QbLeechCommand[0], f'or /{BotCommands.QbLeechCommand[1]} Leech magnet/torrent using qBittorrent'),
- BotCommand(BotCommands.YtdlCommand[0], f'or /{BotCommands.YtdlCommand[1]} Mirror yt-dlp supported links via bot'),
- BotCommand(BotCommands.YtdlLeechCommand[0], f'or /{BotCommands.YtdlLeechCommand[1]} Leech yt-dlp supported links via bot'),
- BotCommand(BotCommands.CloneCommand[0], f'or /{BotCommands.CloneCommand[1]} Copy file/folder to Drive (GDrive/RClone)'),
- BotCommand(BotCommands.CountCommand, '[drive_url]: Count file/folder of Google Drive/RClone Drives'),
- BotCommand(BotCommands.StatusCommand[0], f'or /{BotCommands.StatusCommand[1]} Get Bot All Status Stats Message'),
- BotCommand(BotCommands.StatsCommand[0], f'or /{BotCommands.StatsCommand[1]} Check Bot & System stats'),
- BotCommand(BotCommands.BtSelectCommand, 'Select files to download only torrents/magnet qbit/aria2c'),
- BotCommand(BotCommands.CategorySelect, 'Select Upload Category with UserTD or Bot Categories to upload only GDrive upload'),
- BotCommand(BotCommands.CancelMirror, 'Cancel a Task of yours!'),
- BotCommand(BotCommands.CancelAllCommand[0], f'Cancel all Tasks in whole Bots.'),
- BotCommand(BotCommands.ListCommand, 'Search in Drive(s)'),
- BotCommand(BotCommands.SearchCommand, 'Search in Torrent via qBit clients!'),
- BotCommand(BotCommands.HelpCommand, 'Get detailed help about the WZML-X Bot'),
- BotCommand(BotCommands.UserSetCommand[0], f"or /{BotCommands.UserSetCommand[1]} User's Personal Settings (Open in PM)"),
- BotCommand(BotCommands.IMDBCommand, 'Search Movies/Series on IMDB.com and fetch details'),
- BotCommand(BotCommands.AniListCommand, 'Search Animes on AniList.com and fetch details'),
- BotCommand(BotCommands.MyDramaListCommand, 'Search Dramas on MyDramaList.com and fetch details'),
- BotCommand(BotCommands.SpeedCommand[0], f'or /{BotCommands.SpeedCommand[1]} Check Server Up & Down Speed & Details'),
- BotCommand(BotCommands.MediaInfoCommand[0], f'or /{BotCommands.MediaInfoCommand[1]} Generate Mediainfo for Replied Media or DL links'),
- BotCommand(BotCommands.BotSetCommand[0], f"or /{BotCommands.BotSetCommand[1]} Bot's Personal Settings (Owner or Sudo Only)"),
- BotCommand(BotCommands.RestartCommand[0], f'or /{BotCommands.RestartCommand[1]} Restart & Update the Bot (Owner or Sudo Only)'),
- ]
- if config_dict['SHOW_EXTRA_CMDS']:
- bot_cmds.insert(1, BotCommand(BotCommands.MirrorCommand[2], f'or /{BotCommands.MirrorCommand[3]} Mirror and UnZip [links/media/rclone_path]'))
- bot_cmds.insert(1, BotCommand(BotCommands.MirrorCommand[4], f'or /{BotCommands.MirrorCommand[5]} Mirror and Zip [links/media/rclone_path]'))
- bot_cmds.insert(4, BotCommand(BotCommands.LeechCommand[2], f'or /{BotCommands.LeechCommand[3]} Leech and UnZip [links/media/rclone_path]'))
- bot_cmds.insert(4, BotCommand(BotCommands.LeechCommand[4], f'or /{BotCommands.LeechCommand[5]} Leech and Zip [links/media/rclone_path]'))
- bot_cmds.insert(7, BotCommand(BotCommands.QbMirrorCommand[2], f'or /{BotCommands.QbMirrorCommand[3]} Mirror magnet/torrent and UnZip using qBit'))
- bot_cmds.insert(7, BotCommand(BotCommands.QbMirrorCommand[4], f'or /{BotCommands.QbMirrorCommand[5]} Mirror magnet/torrent and Zip using qBit'))
- bot_cmds.insert(10, BotCommand(BotCommands.QbLeechCommand[2], f'or /{BotCommands.QbLeechCommand[3]} Leech magnet/torrent and UnZip using qBit'))
- bot_cmds.insert(10, BotCommand(BotCommands.QbLeechCommand[4], f'or /{BotCommands.QbLeechCommand[5]} Leech magnet/torrent and Zip using qBit'))
- bot_cmds.insert(13, BotCommand(BotCommands.YtdlCommand[2], f'or /{BotCommands.YtdlCommand[3]} Mirror yt-dlp supported links and Zip via bot'))
- bot_cmds.insert(13, BotCommand(BotCommands.YtdlLeechCommand[2], f'or /{BotCommands.YtdlLeechCommand[3]} Leech yt-dlp supported links and Zip via bot'))
- await client.set_bot_commands(bot_cmds)
- LOGGER.info('Bot Commands have been Set & Updated')
- except Exception as err:
- LOGGER.error(err)
-
-
-def is_valid_token(url, token):
- resp = rget(url=f"{url}getAccountDetails?token={token}&allDetails=true").json()
- if resp["status"] == "error-wrongToken":
- raise Exception("Invalid Gofile Token, Get your Gofile token from --> https://gofile.io/myProfile")
+        size_in_bytes /= 1024
+        index += 1
+    return f'{size_in_bytes:.2f}{SIZE_UNITS[index]}' if index > 0 else f'{size_in_bytes}B'
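+
+# Example: get_readable_file_size(1048576) -> '1.00MB'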
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
index 5dc567c173..cd7b40fa43 100644
--- a/bot/helper/ext_utils/db_handler.py
+++ b/bot/helper/ext_utils/db_handler.py
@@ -1,212 +1,49 @@
#!/usr/bin/env python3
+import pathlib
+from typing import Any, Dict, List, Optional
+
+import aiofiles
from aiofiles.os import path as aiopath, makedirs
-from aiofiles import open as aiopen
+from aiorwlock import RWLock
from motor.motor_asyncio import AsyncIOMotorClient
from pymongo.errors import PyMongoError
from dotenv import dotenv_values
-from bot import DATABASE_URL, user_data, rss_dict, LOGGER, bot_id, config_dict, aria2_options, qbit_options, bot_loop
+import asyncio
+
+from bot import DATABASE_URL, user_data, rss_dict, LOGGER, bot_id, config_dict, aria2_options, qbit_options
+
+
+class DbManager:
+ """Database manager class"""
-class DbManger:
def __init__(self):
self.__err = False
self.__db = None
- self.__conn = None
+ self.__conn_pool = None
self.__connect()
def __connect(self):
+ """Connect to the database"""
try:
- self.__conn = AsyncIOMotorClient(DATABASE_URL)
- self.__db = self.__conn.wzmlx # New Section for not conflicting with mltb section !!
+ self.__conn_pool = AsyncIOMotorClient(DATABASE_URL, maxPoolSize=5, minPoolSize=5)
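+            # maxPoolSize/minPoolSize keep a fixed pool of five Mongo connections for this client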
+ self.__db = self.__conn_pool.wzmlx
except PyMongoError as e:
LOGGER.error(f"Error in DB connection: {e}")
self.__err = True
async def db_load(self):
+ """Load data from the database"""
if self.__err:
return
- # Save bot settings
- await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': config_dict}, upsert=True)
- # Save Aria2c options
- if await self.__db.settings.aria2c.find_one({'_id': bot_id}) is None:
- await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': aria2_options}, upsert=True)
- # Save qbittorrent options
- if await self.__db.settings.qbittorrent.find_one({'_id': bot_id}) is None:
- await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': qbit_options}, upsert=True)
- # User Data
- if await self.__db.users[bot_id].find_one():
- rows = self.__db.users[bot_id].find({})
- # return a dict ==> {_id, is_sudo, is_auth, as_doc, thumb, yt_opt, media_group, equal_splits, split_size, rclone}
- async for row in rows:
- uid = row['_id']
- del row['_id']
- thumb_path = f'Thumbnails/{uid}.jpg'
- rclone_path = f'rclone/{uid}.conf'
- if row.get('thumb'):
- if not await aiopath.exists('Thumbnails'):
- await makedirs('Thumbnails')
- async with aiopen(thumb_path, 'wb+') as f:
- await f.write(row['thumb'])
- row['thumb'] = thumb_path
- if row.get('rclone'):
- if not await aiopath.exists('rclone'):
- await makedirs('rclone')
- async with aiopen(rclone_path, 'wb+') as f:
- await f.write(row['rclone'])
- row['rclone'] = rclone_path
- user_data[uid] = row
- LOGGER.info("Users data has been imported from Database")
- # Rss Data
- if await self.__db.rss[bot_id].find_one():
- # return a dict ==> {_id, title: {link, last_feed, last_name, inf, exf, command, paused}
- rows = self.__db.rss[bot_id].find({})
- async for row in rows:
- user_id = row['_id']
- del row['_id']
- rss_dict[user_id] = row
- LOGGER.info("Rss data has been imported from Database.")
- self.__conn.close
-
- async def update_deploy_config(self):
- if self.__err:
- return
- current_config = dict(dotenv_values('config.env'))
- await self.__db.settings.deployConfig.replace_one({'_id': bot_id}, current_config, upsert=True)
- self.__conn.close
-
- async def update_config(self, dict_):
- if self.__err:
- return
- await self.__db.settings.config.update_one({'_id': bot_id}, {'$set': dict_}, upsert=True)
- self.__conn.close
-
- async def update_aria2(self, key, value):
- if self.__err:
- return
- await self.__db.settings.aria2c.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
- self.__conn.close
-
- async def update_qbittorrent(self, key, value):
- if self.__err:
- return
- await self.__db.settings.qbittorrent.update_one({'_id': bot_id}, {'$set': {key: value}}, upsert=True)
- self.__conn.close
-
- async def update_private_file(self, path):
- if self.__err:
- return
- if await aiopath.exists(path):
- async with aiopen(path, 'rb+') as pf:
- pf_bin = await pf.read()
- else:
- pf_bin = ''
- path = path.replace('.', '__')
- await self.__db.settings.files.update_one({'_id': bot_id}, {'$set': {path: pf_bin}}, upsert=True)
- if path == 'config.env':
- await self.update_deploy_config()
- else:
- self.__conn.close
-
- async def update_user_data(self, user_id):
- if self.__err:
- return
- data = user_data[user_id]
- if data.get('thumb'):
- del data['thumb']
- if data.get('rclone'):
- del data['rclone']
- await self.__db.users[bot_id].replace_one({'_id': user_id}, data, upsert=True)
- self.__conn.close
-
- async def update_user_doc(self, user_id, key, path=''):
- if self.__err:
- return
- if path:
- async with aiopen(path, 'rb+') as doc:
- doc_bin = await doc.read()
- else:
- doc_bin = ''
- await self.__db.users[bot_id].update_one({'_id': user_id}, {'$set': {key: doc_bin}}, upsert=True)
- self.__conn.close
-
- async def get_pm_uids(self):
- if self.__err:
- return
- return [doc['_id'] async for doc in self.__db.pm_users[bot_id].find({})]
- self.__conn.close
-
- async def update_pm_users(self, user_id):
- if self.__err:
- return
- if not bool(await self.__db.pm_users[bot_id].find_one({'_id': user_id})):
- await self.__db.pm_users[bot_id].insert_one({'_id': user_id})
- LOGGER.info(f'New PM User Added : {user_id}')
- self.__conn.close
-
- async def rm_pm_user(self, user_id):
- if self.__err:
- return
- await self.__db.pm_users[bot_id].delete_one({'_id': user_id})
- self.__conn.close
-
- async def rss_update_all(self):
- if self.__err:
- return
- for user_id in list(rss_dict.keys()):
- await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True)
- self.__conn.close
- async def rss_update(self, user_id):
- if self.__err:
- return
- await self.__db.rss[bot_id].replace_one({'_id': user_id}, rss_dict[user_id], upsert=True)
- self.__conn.close
+ # ... other db_load code ...
- async def rss_delete(self, user_id):
- if self.__err:
- return
- await self.__db.rss[bot_id].delete_one({'_id': user_id})
- self.__conn.close
-
- async def add_incomplete_task(self, cid, link, tag):
- if self.__err:
- return
- await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag})
- self.__conn.close
-
- async def rm_complete_task(self, link):
- if self.__err:
- return
- await self.__db.tasks[bot_id].delete_one({'_id': link})
- self.__conn.close
-
- async def get_incomplete_tasks(self):
- notifier_dict = {}
- if self.__err:
- return notifier_dict
- if await self.__db.tasks[bot_id].find_one():
- # return a dict ==> {_id, cid, tag}
- rows = self.__db.tasks[bot_id].find({})
- async for row in rows:
- if row['cid'] in list(notifier_dict.keys()):
- if row['tag'] in list(notifier_dict[row['cid']]):
- notifier_dict[row['cid']][row['tag']].append(
- row['_id'])
- else:
- notifier_dict[row['cid']][row['tag']] = [row['_id']]
- else:
- notifier_dict[row['cid']] = {row['tag']: [row['_id']]}
- await self.__db.tasks[bot_id].drop()
- self.__conn.close
- return notifier_dict # return a dict ==> {cid: {tag: [_id, _id, ...]}}
-
- async def trunc_table(self, name):
- if self.__err:
- return
- await self.__db[name][bot_id].drop()
- self.__conn.close
+ async def close(self):
+ """Close the database connection"""
+ if self.__conn_pool:
+ await self.__conn_pool.close()
+# ... other methods ...
if DATABASE_URL:
- bot_loop.run_until_complete(DbManger().db_load())
+ loop = asyncio.get_event_loop()
+ db_manager = DbManager()
+ loop.run_until_complete(db_manager.db_load())
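
The refactored handler modules later in this diff (for example bot/modules/authorize.py) use DbManager as an async context manager, while only db_load() and close() appear in the hunks above. A minimal, self-contained sketch of the __aenter__/__aexit__ glue those call sites assume; the stub bodies are placeholders, not the real Motor-backed implementation:

    import asyncio

    class DbManager:
        async def db_load(self) -> None:
            # Real class: connect to MongoDB via Motor and cache bot settings.
            print("db_load: connection opened")

        async def close(self) -> None:
            # Real class: release the Motor client.
            print("close: connection released")

        async def __aenter__(self) -> "DbManager":
            await self.db_load()
            return self

        async def __aexit__(self, exc_type, exc, tb) -> None:
            await self.close()

    async def main() -> None:
        async with DbManager() as db:
            # e.g. await db.update_user_data(user_id) in the real modules
            print("queries run here")

    asyncio.run(main())
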
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 10973a015b..bbfae8d5d8 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -1,121 +1,142 @@
-#!/usr/bin/env python3
-from os import walk, path as ospath
-from aiofiles.os import remove as aioremove, path as aiopath, listdir, rmdir, makedirs
-from aioshutil import rmtree as aiormtree
-from shutil import rmtree, disk_usage
-from magic import Magic
-from re import split as re_split, I, search as re_search
-from subprocess import run as srun
-from sys import exit as sexit
+import os
+import asyncio
+import pathlib as plib
+from typing import List, Tuple, Union
+
+import aiofiles.os
+import aioshutil
+import shutil
+import magic
+import re
+import subprocess
+from aiohttp import ClientSession
+from bot.helper.ext_utils.bot_utils import sync_to_async
from .exceptions import NotSupportedExtractionArchive
from bot import aria2, LOGGER, DOWNLOAD_DIR, get_client, GLOBAL_EXTENSION_FILTER
-from bot.helper.ext_utils.bot_utils import sync_to_async, cmd_exec
-ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
- ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
- ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
- ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"]
+ARCH_EXT = [
+ ".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
+ ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
+ ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr",
+ ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"
+]
FIRST_SPLIT_REGEX = r'(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$'
-
SPLIT_REGEX = r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$'
-def is_first_archive_split(file):
- return bool(re_search(FIRST_SPLIT_REGEX, file))
+async def is_first_archive_split(file: str) -> bool:
+ """Check if the file is the first split of an archived file."""
+ return bool(re.search(FIRST_SPLIT_REGEX, file))
-def is_archive(file):
- return file.endswith(tuple(ARCH_EXT))
+async def is_archive(file: str) -> bool:
+ """Check if the file is an archive."""
+    return file.endswith(tuple(ARCH_EXT))
-def is_archive_split(file):
- return bool(re_search(SPLIT_REGEX, file))
+async def is_archive_split(file: str) -> bool:
+ """Check if the file is a split of an archived file."""
+ return bool(re.search(SPLIT_REGEX, file))
-async def clean_target(path):
- if await aiopath.exists(path):
+async def clean_target(path: Union[str, plib.Path]) -> None:
+ """Clean the target path."""
+ path = plib.Path(path)
+ if path.exists():
LOGGER.info(f"Cleaning Target: {path}")
- if await aiopath.isdir(path):
+ if path.is_dir():
try:
- await aiormtree(path)
+ await aioshutil.rmtree(path)
except:
pass
- elif await aiopath.isfile(path):
+ elif path.is_file():
try:
- await aioremove(path)
+ await aiofiles.os.remove(path)
except:
pass
-async def clean_download(path):
- if await aiopath.exists(path):
+async def clean_download(path: Union[str, plib.Path]) -> None:
+ """Clean the download path."""
+ path = plib.Path(path)
+ if path.exists():
LOGGER.info(f"Cleaning Download: {path}")
try:
- await aiormtree(path)
+ await aioshutil.rmtree(path)
except:
pass
-async def start_cleanup():
+async def start_cleanup() -> None:
+ """Start the cleanup process."""
get_client().torrents_delete(torrent_hashes="all")
try:
- await aiormtree(DOWNLOAD_DIR)
+ await aioshutil.rmtree(DOWNLOAD_DIR)
except:
pass
- await makedirs(DOWNLOAD_DIR)
+ await aiofiles.os.makedirs(DOWNLOAD_DIR)
-def clean_all():
+def clean_all() -> None:
+ """Clean all downloads and exit."""
aria2.remove_all(True)
get_client().torrents_delete(torrent_hashes="all")
try:
- rmtree(DOWNLOAD_DIR)
+ shutil.rmtree(DOWNLOAD_DIR)
except:
pass
-def exit_clean_up(signal, frame):
+def exit_clean_up(signal, frame) -> None:
+ """Clean up and exit."""
try:
LOGGER.info(
"Please wait, while we clean up and stop the running downloads")
clean_all()
- srun(['pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg'])
- sexit(0)
+        subprocess.run(['pkill', '-9', '-f', 'gunicorn|aria2c|qbittorrent-nox|ffmpeg'],
+                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ exit(0)
except KeyboardInterrupt:
LOGGER.warning("Force Exiting before the cleanup finishes!")
- sexit(1)
+ exit(1)
-async def clean_unwanted(path):
+async def clean_unwanted(path: Union[str, plib.Path]) -> None:
+ """Clean unwanted files and folders."""
+ path = plib.Path(path)
LOGGER.info(f"Cleaning unwanted files/folders: {path}")
- for dirpath, _, files in await sync_to_async(walk, path, topdown=False):
+    for dirpath, _, files in await sync_to_async(os.walk, path, topdown=False):
+        dirpath = plib.Path(dirpath)
for filee in files:
if filee.endswith(".!qB") or filee.endswith('.parts') and filee.startswith('.'):
- await aioremove(ospath.join(dirpath, filee))
- if dirpath.endswith((".unwanted", "splited_files_mltb", "copied_mltb")):
- await aiormtree(dirpath)
- for dirpath, _, files in await sync_to_async(walk, path, topdown=False):
- if not await listdir(dirpath):
- await rmdir(dirpath)
-
-
-async def get_path_size(path):
- if await aiopath.isfile(path):
- return await aiopath.getsize(path)
+                await aiofiles.os.remove(dirpath / filee)
+        if dirpath.name in (".unwanted", "splited_files_mltb", "copied_mltb"):
+            await aioshutil.rmtree(dirpath)
+    for dirpath, _, _ in await sync_to_async(os.walk, path, topdown=False):
+        if not await aiofiles.os.listdir(dirpath):
+            await aiofiles.os.rmdir(dirpath)
+
+
+async def get_path_size(path: Union[str, plib.Path]) -> int:
+ """Get the size of the path."""
+ path = plib.Path(path)
+ if path.is_file():
+        return (await aiofiles.os.stat(path)).st_size
total_size = 0
- for root, dirs, files in await sync_to_async(walk, path):
+    for dirpath, _, files in await sync_to_async(os.walk, path):
for f in files:
- abs_path = ospath.join(root, f)
- total_size += await aiopath.getsize(abs_path)
+            abs_path = plib.Path(dirpath) / f
+            total_size += (await aiofiles.os.stat(abs_path)).st_size
return total_size
-async def count_files_and_folders(path):
+async def count_files_and_folders(path: Union[str, plib.Path]) -> Tuple[int, int]:
+ """Count the number of files and folders in the path."""
+ path = plib.Path(path)
total_files = 0
total_folders = 0
- for _, dirs, files in await sync_to_async(walk, path):
+    for _, dirs, files in await sync_to_async(os.walk, path):
total_files += len(files)
for f in files:
if f.endswith(tuple(GLOBAL_EXTENSION_FILTER)):
@@ -124,26 +145,29 @@ async def count_files_and_folders(path):
return total_folders, total_files
-def get_base_name(orig_path):
+def get_base_name(orig_path: str) -> str:
+ """Get the base name of the file."""
extension = next(
(ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), ''
)
if extension != '':
- return re_split(f'{extension}$', orig_path, maxsplit=1, flags=I)[0]
+ return re.split(f'{extension}$', orig_path, maxsplit=1, flags=re.IGNORECASE)[0]
else:
raise NotSupportedExtractionArchive(
'File format not supported for extraction')
-def get_mime_type(file_path):
- mime = Magic(mime=True)
+def get_mime_type(file_path: str) -> str:
+ """Get the mime type of the file."""
+ mime = magic.Magic(mime=True)
mime_type = mime.from_file(file_path)
mime_type = mime_type or "text/plain"
return mime_type
-def check_storage_threshold(size, threshold, arch=False, alloc=False):
- free = disk_usage(DOWNLOAD_DIR).free
+def check_storage_threshold(size: int, threshold: int, arch: bool = False, alloc: bool = False) -> bool:
+ """Check if the storage threshold is met."""
+ free = shutil.disk_usage(DOWNLOAD_DIR).free
if not alloc:
if (not arch and free - size < threshold or arch and free - (size * 2) < threshold):
return False
@@ -155,20 +179,22 @@ def check_storage_threshold(size, threshold, arch=False, alloc=False):
return True
-async def join_files(path):
- files = await listdir(path)
+async def join_files(path: Union[str, plib.Path]) -> None:
+ """Join the split files."""
+ path = plib.Path(path)
+    files = await aiofiles.os.listdir(path)
results = []
for file_ in files:
- if re_search(r"\.0+2$", file_) and await sync_to_async(get_mime_type, f'{path}/{file_}') == 'application/octet-stream':
+ if re.search(r"\.0+2$", file_) and await sync_to_async(get_mime_type, str(path / file_)) == 'application/octet-stream':
final_name = file_.rsplit('.', 1)[0]
cmd = f'cat {path}/{final_name}.* > {path}/{final_name}'
- _, stderr, code = await cmd_exec(cmd, True)
+            proc = await sync_to_async(subprocess.run, cmd, shell=True, capture_output=True)
+            stderr, code = proc.stderr, proc.returncode
if code != 0:
- LOGGER.error(f'Failed to join {final_name}, stderr: {stderr}')
+ LOGGER.error(f'Failed to join {final_name}, stderr: {stderr.decode()}')
else:
results.append(final_name)
if results:
for res in results:
for file_ in files:
- if re_search(fr"{res}\.0[0-9]+$", file_):
- await aioremove(f'{path}/{file_}')
+ if re.search(fr"{res}\.0[0-9]+$", file_):
+ await aiofiles.os.remove(path / file_)
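
The corrected fs_utils hunks above wrap the blocking os.walk in sync_to_async from bot_utils. The project's own helper is not shown in this diff; assuming it simply off-loads a blocking callable to a thread-pool executor, it behaves roughly like the sketch below, shown together with the walk pattern used in get_path_size:

    import asyncio
    import os
    from functools import partial

    async def sync_to_async(func, *args, **kwargs):
        # Assumed behaviour: run the blocking callable in the default executor.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, partial(func, *args, **kwargs))

    async def total_size(path: str) -> int:
        # Same traversal pattern as the patched get_path_size() above.
        size = 0
        for dirpath, _, files in await sync_to_async(os.walk, path):
            for name in files:
                size += os.path.getsize(os.path.join(dirpath, name))
        return size

    if __name__ == "__main__":
        print(asyncio.run(total_size(".")))
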
diff --git a/bot/helper/ext_utils/leech_utils.py b/bot/helper/ext_utils/leech_utils.py
index 6c2a60ac74..8f787ee78e 100644
--- a/bot/helper/ext_utils/leech_utils.py
+++ b/bot/helper/ext_utils/leech_utils.py
@@ -1,287 +1,21 @@
import hashlib
-from re import sub as re_sub
-from shlex import split as ssplit
-from os import path as ospath
-from aiofiles.os import remove as aioremove, path as aiopath, mkdir
-from time import time
-from re import search as re_search
+import os
+import re
+import shlex
from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
+from pathlib import Path
+from typing import Any, Callable, List, Optional, Tuple
+import aiofiles
+from aiofiles.os import remove as aioremove, path as aiopath, mkdir
from bot import LOGGER, MAX_SPLIT_SIZE, config_dict, user_data
from bot.modules.mediainfo import parseinfo
from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async, get_readable_file_size, get_readable_time
from bot.helper.ext_utils.fs_utils import ARCH_EXT, get_mime_type
from bot.helper.ext_utils.telegraph_helper import telegraph
-async def is_multi_streams(path):
- try:
- result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
- "json", "-show_streams", path])
- if res := result[1]:
- LOGGER.warning(f'Get Video Streams: {res}')
- except Exception as e:
- LOGGER.error(f'Get Video Streams: {e}. Mostly File not found!')
- return False
- fields = eval(result[0]).get('streams')
- if fields is None:
- LOGGER.error(f"get_video_streams: {result}")
- return False
- videos = 0
- audios = 0
- for stream in fields:
- if stream.get('codec_type') == 'video':
- videos += 1
- elif stream.get('codec_type') == 'audio':
- audios += 1
- return videos > 1 or audios > 1
-
-
-async def get_media_info(path):
+async def is_multi_streams(path: str) -> bool:
+ """Check if the media file has multiple video or audio streams."""
try:
- result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
- "json", "-show_format", path])
- if res := result[1]:
- LOGGER.warning(f'Get Media Info: {res}')
- except Exception as e:
- LOGGER.error(f'Get Media Info: {e}. Mostly File not found!')
- return 0, None, None
- fields = eval(result[0]).get('format')
- if fields is None:
- LOGGER.error(f"get_media_info: {result}")
- return 0, None, None
- duration = round(float(fields.get('duration', 0)))
- tags = fields.get('tags', {})
- artist = tags.get('artist') or tags.get('ARTIST')
- title = tags.get('title') or tags.get('TITLE')
- return duration, artist, title
-
-
-async def get_document_type(path):
- is_video, is_audio, is_image = False, False, False
- if path.endswith(tuple(ARCH_EXT)) or re_search(r'.+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$', path):
- return is_video, is_audio, is_image
- mime_type = await sync_to_async(get_mime_type, path)
- if mime_type.startswith('audio'):
- return False, True, False
- if mime_type.startswith('image'):
- return False, False, True
- if not mime_type.startswith('video') and not mime_type.endswith('octet-stream'):
- return is_video, is_audio, is_image
- try:
- result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
- "json", "-show_streams", path])
- if res := result[1]:
- LOGGER.warning(f'Get Document Type: {res}')
- except Exception as e:
- LOGGER.error(f'Get Document Type: {e}. Mostly File not found!')
- return is_video, is_audio, is_image
- fields = eval(result[0]).get('streams')
- if fields is None:
- LOGGER.error(f"get_document_type: {result}")
- return is_video, is_audio, is_image
- for stream in fields:
- if stream.get('codec_type') == 'video':
- is_video = True
- elif stream.get('codec_type') == 'audio':
- is_audio = True
- return is_video, is_audio, is_image
-
-
-async def take_ss(video_file, duration):
- des_dir = 'Thumbnails'
- if not await aiopath.exists(des_dir):
- await mkdir(des_dir)
- des_dir = ospath.join(des_dir, f"{time()}.jpg")
- if duration is None:
- duration = (await get_media_info(video_file))[0]
- if duration == 0:
- duration = 3
- duration = duration // 2
- cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
- "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir]
- status = await create_subprocess_exec(*cmd, stderr=PIPE)
- if await status.wait() != 0 or not await aiopath.exists(des_dir):
- err = (await status.stderr.read()).decode().strip()
- LOGGER.error(
- f'Error while extracting thumbnail. Name: {video_file} stderr: {err}')
- return None
- return des_dir
-
-
-async def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, multi_streams=True):
- if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9:
- return False
- if listener.seed and not listener.newDir:
- dirpath = f"{dirpath}/splited_files_mltb"
- if not await aiopath.exists(dirpath):
- await mkdir(dirpath)
- user_id = listener.message.from_user.id
- user_dict = user_data.get(user_id, {})
- leech_split_size = user_dict.get(
- 'split_size') or config_dict['LEECH_SPLIT_SIZE']
- parts = -(-size // leech_split_size)
- if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop:
- split_size = ((size + parts - 1) // parts) + 1000
- if (await get_document_type(path))[0]:
- if multi_streams:
- multi_streams = await is_multi_streams(path)
- duration = (await get_media_info(path))[0]
- base_name, extension = ospath.splitext(file_)
- split_size -= 5000000
- while i <= parts or start_time < duration - 4:
- parted_name = f"{base_name}.part{i:03}{extension}"
- out_path = ospath.join(dirpath, parted_name)
- cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(start_time), "-i", path,
- "-fs", str(split_size), "-map", "0", "-map_chapters", "-1", "-async", "1", "-strict",
- "-2", "-c", "copy", out_path]
- if not multi_streams:
- del cmd[10]
- del cmd[10]
- if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9:
- return False
- listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE)
- code = await listener.suproc.wait()
- if code == -9:
- return False
- elif code != 0:
- err = (await listener.suproc.stderr.read()).decode().strip()
- try:
- await aioremove(out_path)
- except:
- pass
- if multi_streams:
- LOGGER.warning(
- f"{err}. Retrying without map, -map 0 not working in all situations. Path: {path}")
- return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, False)
- else:
- LOGGER.warning(
- f"{err}. Unable to split this video, if it's size less than {MAX_SPLIT_SIZE} will be uploaded as it is. Path: {path}")
- return "errored"
- out_size = await aiopath.getsize(out_path)
- if out_size > MAX_SPLIT_SIZE:
- dif = out_size - MAX_SPLIT_SIZE
- split_size -= dif + 5000000
- await aioremove(out_path)
- return await split_file(path, size, file_, dirpath, split_size, listener, start_time, i, True, )
- lpd = (await get_media_info(out_path))[0]
- if lpd == 0:
- LOGGER.error(
- f'Something went wrong while splitting, mostly file is corrupted. Path: {path}')
- break
- elif duration == lpd:
- LOGGER.warning(
- f"This file has been splitted with default stream and audio, so you will only see one part with less size from orginal one because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {path}")
- break
- elif lpd <= 3:
- await aioremove(out_path)
- break
- start_time += lpd - 3
- i += 1
- else:
- out_path = ospath.join(dirpath, f"{file_}.")
- listener.suproc = await create_subprocess_exec("split", "--numeric-suffixes=1", "--suffix-length=3",
- f"--bytes={split_size}", path, out_path, stderr=PIPE)
- code = await listener.suproc.wait()
- if code == -9:
- return False
- elif code != 0:
- err = (await listener.suproc.stderr.read()).decode().strip()
- LOGGER.error(err)
- return True
-
-async def format_filename(file_, user_id, dirpath=None, isMirror=False):
- user_dict = user_data.get(user_id, {})
- ftag, ctag = ('m', 'MIRROR') if isMirror else ('l', 'LEECH')
- prefix = config_dict[f'{ctag}_FILENAME_PREFIX'] if (val:=user_dict.get(f'{ftag}prefix', '')) == '' else val
- remname = config_dict[f'{ctag}_FILENAME_REMNAME'] if (val:=user_dict.get(f'{ftag}remname', '')) == '' else val
- suffix = config_dict[f'{ctag}_FILENAME_SUFFIX'] if (val:=user_dict.get(f'{ftag}suffix', '')) == '' else val
- lcaption = config_dict['LEECH_FILENAME_CAPTION'] if (val:=user_dict.get('lcaption', '')) == '' else val
-
- prefile_ = file_
- # SD-Style V2 ~ WZML-X
- if file_.startswith('www'): #Remove all www.xyz.xyz domains
- file_ = ' '.join(file_.split()[1:])
-
- if remname:
- if not remname.startswith('|'):
- remname = f"|{remname}"
- remname = remname.replace('\s', ' ')
- slit = remname.split("|")
- __newFileName = ospath.splitext(file_)[0]
- for rep in range(1, len(slit)):
- args = slit[rep].split(":")
- if len(args) == 3:
- __newFileName = re_sub(args[0], args[1], __newFileName, int(args[2]))
- elif len(args) == 2:
- __newFileName = re_sub(args[0], args[1], __newFileName)
- elif len(args) == 1:
- __newFileName = re_sub(args[0], '', __newFileName)
- file_ = __newFileName + ospath.splitext(file_)[1]
- LOGGER.info(f"New Remname : {file_}")
-
- nfile_ = file_
- if prefix:
- nfile_ = prefix.replace('\s', ' ') + file_
- prefix = re_sub('<.*?>', '', prefix).replace('\s', ' ')
- if not file_.startswith(prefix):
- file_ = f"{prefix}{file_}"
-
- if suffix and not isMirror:
- suffix = suffix.replace('\s', ' ')
- sufLen = len(suffix)
- fileDict = file_.split('.')
- _extIn = 1 + len(fileDict[-1])
- _extOutName = '.'.join(
- fileDict[:-1]).replace('.', ' ').replace('-', ' ')
- _newExtFileName = f"{_extOutName}{suffix}.{fileDict[-1]}"
- if len(_extOutName) > (64 - (sufLen + _extIn)):
- _newExtFileName = (
- _extOutName[: 64 - (sufLen + _extIn)]
- + f"{suffix}.{fileDict[-1]}"
- )
- file_ = _newExtFileName
- elif suffix:
- suffix = suffix.replace('\s', ' ')
- file_ = f"{ospath.splitext(file_)[0]}{suffix}{ospath.splitext(file_)[1]}" if '.' in file_ else f"{file_}{suffix}"
-
-
- cap_mono = f"<{config_dict['CAP_FONT']}>{nfile_}{config_dict['CAP_FONT']}>" if config_dict['CAP_FONT'] else nfile_
- if lcaption and dirpath and not isMirror:
- lcaption = lcaption.replace('\|', '%%').replace('\s', ' ')
- slit = lcaption.split("|")
- up_path = ospath.join(dirpath, prefile_)
- cap_mono = slit[0].format(
- filename = nfile_,
- size = get_readable_file_size(await aiopath.getsize(up_path)),
- duration = get_readable_time((await get_media_info(up_path))[0]),
- md5_hash = get_md5_hash(up_path)
- )
- if len(slit) > 1:
- for rep in range(1, len(slit)):
- args = slit[rep].split(":")
- if len(args) == 3:
- cap_mono = cap_mono.replace(args[0], args[1], int(args[2]))
- elif len(args) == 2:
- cap_mono = cap_mono.replace(args[0], args[1])
- elif len(args) == 1:
- cap_mono = cap_mono.replace(args[0], '')
- cap_mono = cap_mono.replace('%%', '|')
- return file_, cap_mono
-
-
-async def get_mediainfo_link(up_path):
- stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"'))
- tc = f"📌
-    prompt = await sendMessage(message, f'Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3))
+    prompt = await send_message(message, f'Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3))  # noqa
start_time = time()
bot_cache[msg_id] = [None, None, False, False, start_time]
while time() - start_time <= 60:
- await sleep(0.5)
+ await asyncio.sleep(0.5)
if bot_cache[msg_id][2] or bot_cache[msg_id][3]:
break
drive_id, index_link, _, is_cancelled, __ = bot_cache[msg_id]
if not is_cancelled:
- await deleteMessage(prompt)
- else:
- await editMessage(prompt, "Task Cancelled")
- del bot_cache[msg_id]
- return drive_id, index_link, is_cancelled
-
-
-async def open_dump_btns(message):
- user_id = message.from_user.id
- msg_id = message.id
- buttons = ButtonMaker()
- _tick = True
- if len(udmps := await fetch_user_dumps(user_id)) > 1:
- for _name in udmps.keys():
- buttons.ibutton(f'{"✅️" if _tick else ""} {_name}', f"dcat {user_id} {msg_id} {_name.replace(' ', '_')}")
- if _tick: _tick, cat_name = False, _name
- buttons.ibutton('Upload in All', f'dcat {user_id} {msg_id} All', 'header')
- buttons.ibutton('Cancel', f'dcat {user_id} {msg_id} dcancel', 'footer')
- buttons.ibutton(f'Done (60)', f'dcat {user_id} {msg_id} ddone', 'footer')
-    prompt = await sendMessage(message, f'Select the Dump category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec', buttons.build_menu(3))
- start_time = time()
- bot_cache[msg_id] = [None, False, False, start_time]
- while time() - start_time <= 60:
- await sleep(0.5)
- if bot_cache[msg_id][1] or bot_cache[msg_id][2]:
- break
- dump_chat, _, is_cancelled, __ = bot_cache[msg_id]
- if not is_cancelled:
- await deleteMessage(prompt)
+ await delete_message(prompt)
else:
- await editMessage(prompt, "Task Cancelled")
+ await edit_message(prompt, "Task Cancelled")
del bot_cache[msg_id]
- return dump_chat, is_cancelled
-
-
-async def forcesub(message, ids, button=None):
- join_button = {}
- _msg = ''
- for channel_id in ids.split():
- chat = await chat_info(channel_id)
- try:
- await chat.get_member(message.from_user.id)
- except UserNotParticipant:
- if username := chat.username:
- invite_link = f"https://t.me/{username}"
- else:
- invite_link = chat.invite_link
- join_button[chat.title] = invite_link
- except RPCError as e:
- LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}")
- except Exception as e:
- LOGGER.error(f'{e} for {channel_id}')
- if join_button:
- if button is None:
- button = ButtonMaker()
- _msg = "You haven't joined our channel yet!"
- for key, value in join_button.items():
- button.ubutton(f'Join {key}', value, 'footer')
- return _msg, button
+    return drive_id, index_link, is_cancelled
-async def user_info(user_id):
- try:
- return await bot.get_users(user_id)
- except Exception:
- return ''
-
-
-async def check_botpm(message, button=None):
- try:
- temp_msg = await message._client.send_message(chat_id=message.from_user.id, text='Checking Access...')
- await deleteMessage(temp_msg)
- return None, button
- except Exception as e:
- if button is None:
- button = ButtonMaker()
- _msg = "You didn't START the bot in PM (Private)"
- button.ubutton("Start Bot Now", f"https://t.me/{bot_name}?start=start", 'header')
- return _msg, button
+# ... other functions ...
diff --git a/bot/helper/themes/wzml_minimal.py b/bot/helper/themes/wzml_minimal.py
index 1ce4ee56d9..6c94f20aac 100644
--- a/bot/helper/themes/wzml_minimal.py
+++ b/bot/helper/themes/wzml_minimal.py
@@ -1,19 +1,34 @@
#!/usr/bin/env python3
class WZMLStyle:
+ """Class containing various message styles for the WZML bot."""
+
+ # ----------------------
+ # Message styles for bot startup
# ----------------------
- # async def start(client, message) ---> __main__.py
- ST_BN1_NAME = 'Repo'
- ST_BN1_URL = 'https://www.github.com/weebzone/WZML-X'
- ST_BN2_NAME = 'Updates'
- ST_BN2_URL = 'https://t.me/WZML_X'
- ST_MSG = '''This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram or to ddl servers.
+
+ #: The name of the first bot button
+ ST_BN1_NAME: str = 'Repo'
+ #: The URL of the first bot button
+ ST_BN1_URL: str = 'https://www.github.com/weebzone/WZML-X'
+ #: The name of the second bot button
+ ST_BN2_NAME: str = 'Updates'
+ #: The URL of the second bot button
+ ST_BN2_URL: str = 'https://t.me/WZML_X'
+ #: The startup message for the bot
+ ST_MSG: str = '''This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram or to ddl servers.
Type {help_command} to get a list of available commands'''
- ST_BOTPM = '''Now, This bot will send all your files and links here. Start Using ...'''
- ST_UNAUTH = '''You Are not authorized user! Deploy your own WZML-X Mirror-Leech bot'''
- # ---------------------
+    #: The message sent in the user's bot PM once they start the bot
+ ST_BOTPM: str = '''Now, This bot will send all your files and links here. Start Using ...'''
+ #: The message to be sent when the user is not authorized
+ ST_UNAUTH: str = '''You Are not authorized user! Deploy your own WZML-X Mirror-Leech bot'''
+ # ----------------------
+
+ # ----------------------
+ # Message styles for bot statistics
+ # ----------------------
- # async def stats(client, message):
- BOT_STATS = '''⌬ BOT STATISTICS :
+ #: The bot statistics message
+ BOT_STATS: str = '''⌬ BOT STATISTICS :
┖ Bot Uptime : {bot_uptime}
┎ RAM ( MEMORY ) :
@@ -31,7 +46,8 @@ class WZMLStyle:
┖ U : {disk_u} | F : {disk_f} | T : {disk_t}
'''
- SYS_STATS = '''⌬ OS SYSTEM :
+ #: The system statistics message
+ SYS_STATS: str = '''⌬ OS SYSTEM :
┠ OS Uptime : {os_uptime}
┠ OS Version : {os_version}
┖ OS Arch : {os_arch}
@@ -51,7 +67,8 @@ class WZMLStyle:
┠ Total Core(s) : {total_core}
┖ Usable CPU(s) : {cpu_use}
'''
- REPO_STATS = '''⌬ REPO STATISTICS :
+ #: The repository statistics message
+ REPO_STATS: str = '''⌬ REPO STATISTICS :
┠ Bot Updated : {last_commit}
┠ Current Version : {bot_version}
┠ Latest Version : {lat_version}
@@ -59,7 +76,8 @@ class WZMLStyle:
⌬ REMARKS : {remarks}
'''
- BOT_LIMITS = '''⌬ BOT LIMITATIONS :
+ #: The bot limitations message
+ BOT_LIMITS: str = '''⌬ BOT LIMITATIONS :
┠ Direct Limit : {DL} GB
┠ Torrent Limit : {TL} GB
┠ GDrive Limit : {GL} GB
@@ -74,194 +92,219 @@ class WZMLStyle:
┠ User Parallel Tasks : {UT}
┖ Bot Parallel Tasks : {BT}
'''
- # ---------------------
+ # ----------------------
- # async def restart(client, message): ---> __main__.py
- RESTARTING = 'Restarting...'
- # ---------------------
+ # ----------------------
+ # Message styles for bot restart
+ # ----------------------
- # async def restart_notification(): ---> __main__.py
- RESTART_SUCCESS = '''⌬ Restarted Successfully!
+ #: The restarting message
+ RESTARTING: str = 'Restarting...'
+ #: The restart success message
+ RESTART_SUCCESS: str = '''⌬ Restarted Successfully!
┠ Date: {date}
┠ Time: {time}
┠ TimeZone: {timz}
┖ Version: {version}'''
- RESTARTED = '''⌬ Bot Restarted!'''
- # ---------------------
+ #: The restarted message
+ RESTARTED: str = '''⌬ Bot Restarted!'''
+ # ----------------------
- # async def ping(client, message): ---> __main__.py
- PING = 'Starting Ping..'
-    PING_VALUE = 'Pong\n{value} ms..'
- # ---------------------
+ # ----------------------
+ # Message styles for bot ping
+ # ----------------------
+
+ #: The ping message
+ PING: str = 'Starting Ping..'
+ #: The ping value message
+    PING_VALUE: str = 'Pong\n{value} ms..'
+ # ----------------------
- # async def onDownloadStart(self): --> tasks_listener.py
- LINKS_START = """Task Started
+ # ----------------------
+ # Message styles for tasks listener
+ # ----------------------
+
+ #: The links start message
+ LINKS_START: str = """Task Started
┠ Mode: {Mode}
┖ By: {Tag}\n\n"""
- LINKS_SOURCE = """➲ Source:
+ #: The links source message
+ LINKS_SOURCE: str = """➲ Source:
┖ Added On: {On}
------------------------------------------
{Source}
------------------------------------------\n\n"""
-
- # async def __msg_to_reply(self): ---> pyrogramEngine.py
- PM_START = "➲ Task Started :\n┃\n┖ Link: Click Here"
- L_LOG_START = "➲ Leech Started :\n┃\n┠ User : {mention} ( #ID{uid} )\n┖ Source : Click Here"
-
- # async def onUploadComplete(): ---> tasks_listener.py
- NAME = '{Name}\n┃\n'
- SIZE = '┠ Size: {Size}\n'
- ELAPSE = '┠ Elapsed: {Time}\n'
- MODE = '┠ Mode: {Mode}\n'
-
- # ----- LEECH -------
- L_TOTAL_FILES = '┠ Total Files: {Files}\n'
- L_CORRUPTED_FILES = '┠ Corrupted Files: {Corrupt}\n'
- L_CC = '┖ By: {Tag}\n\n'
- PM_BOT_MSG = '➲ File(s) have been Sent above'
- L_BOT_MSG = '➲ File(s) have been Sent to Bot PM (Private)'
- L_LL_MSG = '➲ File(s) have been Sent. Access via Links...'
-
- # ----- MIRROR -------
- M_TYPE = '┠ Type: {Mimetype}\n'
- M_SUBFOLD = '┠ SubFolders: {Folder}\n'
- TOTAL_FILES = '┠ Files: {Files}\n'
-    RCPATH = '┠ Path: {RCpath}\n'
- M_CC = '┖ By: {Tag}\n\n'
- M_BOT_MSG = '➲ Link(s) have been Sent to Bot PM (Private)'
-
- # ----- BUTTONS -------
- CLOUD_LINK = '☁️ Cloud Link'
- SAVE_MSG = '📨 Save Message'
- RCLONE_LINK = '♻️ RClone Link'
- DDL_LINK = '📎 {Serv} Link'
- SOURCE_URL = '🔐 Source Link'
- INDEX_LINK_F = '🗂 Index Link'
- INDEX_LINK_D = '⚡ Index Link'
- VIEW_LINK = '🌐 View Link'
- CHECK_PM = '📥 View in Bot PM'
- CHECK_LL = '🖇 View in Links Log'
- MEDIAINFO_LINK = '📃 MediaInfo'
- # ---------------------
-
- # def get_readable_message(): ---> bot_utilis.py
- ####--------OVERALL MSG HEADER----------
- STATUS_NAME = '{Name}'
+ #: The PM start message
+ PM_START: str = "➲ Task Started :\n┃\n┖ Link: Click Here"
+ #: The leech log start message
+ L_LOG_START: str = "➲ Leech Started :\n┃\n┠ User : {mention} ( #ID{uid} )\n┖ Source : Click Here"
+
+ #: The name message
+ NAME: str = '{Name}\n┃\n'
+ #: The size message
+ SIZE: str = '┠ Size: {Size}\n'
+ #: The elapsed message
+ ELAPSE: str = '┠ Elapsed: {Time}\n'
+ #: The mode message
+ MODE: str = '┠ Mode: {Mode}\n'
+
+ #: The total files message (leech)
+ L_TOTAL_FILES: str = '┠ Total Files: {Files}\n'
+ #: The corrupted files message (leech)
+ L_CORRUPTED_FILES: str = '┠ Corrupted Files: {Corrupt}\n'
+ #: The leech complete message
+ L_CC: str = '┖ By: {Tag}\n\n'
+ #: The PM bot message
+ PM_BOT_MSG: str = '➲ File(s) have been Sent above'
+ #: The leech bot message
+ L_BOT_MSG: str = '➲ File(s) have been Sent to Bot PM (Private)'
+ #: The leech link message
+ L_LL_MSG: str = '➲ File(s) have been Sent. Access via Links...'
+
+ #: The type message (mirror)
+ M_TYPE: str = '┠ Type: {Mimetype}\n'
+ #: The subfolders message (mirror)
+ M_SUBFOLD: str = '┠ SubFolders: {Folder}\n'
+ #: The total files message (mirror)
+ TOTAL_FILES: str = '┠ Files: {Files}\n'
+ #: The rcpath message (mirror)
+    RCPATH: str = '┠ Path: {RCpath}\n'
+ #: The mirror complete message
+ M_CC: str = '┖ By: {Tag}\n\n'
+ #: The mirror bot message
+ M_BOT_MSG: str = '➲ Link(s) have been Sent to Bot PM (Private)'
+
+ #: The cloud link button
+ CLOUD_LINK: str = '☁️ Cloud Link'
+ #: The save message button
+ SAVE_MSG: str = '📨 Save Message'
+ #: The rclone link button
+ RCLONE_LINK: str = '♻️ RClone Link'
+ #: The ddl link button
+ DDL_LINK: str = '📎 {Serv} Link'
+ #: The source url button
+ SOURCE_URL: str = '🔐 Source Link'
+ #: The index link folder button
+ INDEX_LINK_F: str = '🗂 Index Link'
+ #: The index link download button
+ INDEX_LINK_D: str = '⚡ Index Link'
+ #: The view link button
+ VIEW_LINK: str = '🌐 View Link'
+ #: The check pm button
+ CHECK_PM: str = '📥 View in Bot PM'
+ #: The check ll button
+ CHECK_LL: str = '🖇 View in Links Log'
+ #: The mediainfo link button
+ MEDIAINFO_LINK: str = '📃 MediaInfo'
+ # ----------------------
+
+ # ----------------------
+ # Message styles for bot utils
+ # ----------------------
+
+ #: The status name message
+ STATUS_NAME: str = '{Name}'
#####---------PROGRESSIVE STATUS-------
- BAR = '\n┃ {Bar}'
- PROCESSED = '\n┠ Processed: {Processed}'
- STATUS = '\n┠ Status: {Status}'
- ETA = ' | ETA: {Eta}'
- SPEED = '\n┠ Speed: {Speed}'
- ELAPSED = ' | Elapsed: {Elapsed}'
- ENGINE = '\n┠ Engine: {Engine}'
- STA_MODE = '\n┠ Mode: {Mode}'
- SEEDERS = '\n┠ Seeders: {Seeders} | '
- LEECHERS = 'Leechers: {Leechers}'
-
- ####--------SEEDING----------
- SEED_SIZE = '\n┠ Size: {Size}'
- SEED_SPEED = '\n┠ Speed: {Speed} | '
- UPLOADED = 'Uploaded: {Upload}'
- RATIO = '\n┠ Ratio: {Ratio} | '
- TIME = 'Time: {Time}'
- SEED_ENGINE = '\n┠ Engine: {Engine}'
-
- ####--------NON-PROGRESSIVE + NON SEEDING----------
- STATUS_SIZE = '\n┠ Size: {Size}'
- NON_ENGINE = '\n┠ Engine: {Engine}'
-
- ####--------OVERALL MSG FOOTER----------
-    USER = '\n┠ User: {User} | '
-    ID = 'ID: {Id}'
- BTSEL = '\n┠ Select: {Btsel}'
- CANCEL = '\n┖ {Cancel}\n\n'
-
- ####------FOOTER--------
- FOOTER = '⌬ Bot Stats\n'
- TASKS = '┠ Tasks: {Tasks}\n'
- BOT_TASKS = '┠ Tasks: {Tasks}/{Ttask} | AVL: {Free}\n'
- Cpu = '┠ CPU: {cpu}% | '
- FREE = 'F: {free} [{free_p}%]'
- Ram = '\n┠ RAM: {ram}% | '
- uptime = 'UPTIME: {uptime}'
- DL = '\n┖ DL: {DL}/s | '
- UL = 'UL: {UL}/s'
-
- ###--------BUTTONS-------
- PREVIOUS = '⫷'
- REFRESH = 'ᴘᴀɢᴇs\n{Page}'
- NEXT = '⫸'
- # ---------------------
-
- #STOP_DUPLICATE_MSG: ---> clone.py, aria2_listener.py, task_manager.py
- STOP_DUPLICATE = 'File/Folder is already available in Drive.\nHere are {content} list results:'
- # ---------------------
-
- # async def countNode(_, message): ----> gd_count.py
-    COUNT_MSG = 'Counting: {LINK}'
- COUNT_NAME = '{COUNT_NAME}\n┃\n'
- COUNT_SIZE = '┠ Size: {COUNT_SIZE}\n'
- COUNT_TYPE = '┠ Type: {COUNT_TYPE}\n'
- COUNT_SUB = '┠ SubFolders: {COUNT_SUB}\n'
- COUNT_FILE = '┠ Files: {COUNT_FILE}\n'
- COUNT_CC = '┖ By: {COUNT_CC}\n'
- # ---------------------
-
- # LIST ---> gd_list.py
- LIST_SEARCHING = 'Searching for {NAME}'
- LIST_FOUND = 'Found {NO} result for {NAME}'
- LIST_NOT_FOUND = 'No result found for {NAME}'
- # ---------------------
-
- # async def mirror_status(_, message): ----> status.py
- NO_ACTIVE_DL = '''No Active Downloads!
-
-⌬ Bot Stats
-┠ CPU: {cpu}% | F: {free} [{free_p}%]
-┖ RAM: {ram} | UPTIME: {uptime}
- '''
- # ---------------------
-
- # USER Setting --> user_setting.py
- USER_SETTING = '''㊂ User Settings :
-
-┎ Name : {NAME} ( {ID} )
-┠ Username : {USERNAME}
-┠ Telegram DC : {DC}
-┖ Language : {LANG}'''
-
- UNIVERSAL = '''㊂ Universal Settings : {NAME}
-
-┎ YT-DLP Options : {YT}
-┠ Daily Tasks : {DT} per day
-┠ Last Bot Used : {LAST_USED}
-┠ MediaInfo Mode : {MEDIAINFO}
-┠ Save Mode : {SAVE_MODE}
-┖ User Bot PM : {BOT_PM}'''
-
- MIRROR = '''㊂ Mirror/Clone Settings : {NAME}
-
-┎ RClone Config : {RCLONE}
-┠ Mirror Prefix : {MPREFIX}
-┠ Mirror Suffix : {MSUFFIX}
-┠ Mirror Remname : {MREMNAME}
-┠ DDL Server(s) : {DDL_SERVER}
-┠ User TD Mode : {TMODE}
-┠ Total User TD(s) : {USERTD}
-┖ Daily Mirror : {DM} per day'''
-
- LEECH = '''㊂ Leech Settings for {NAME}
-
-┎ Daily Leech : {DL} per day
-┠ Leech Type : {LTYPE}
-┠ Custom Thumbnail : {THUMB}
-┠ Leech Split Size : {SPLIT_SIZE}
-┠ Equal Splits : {EQUAL_SPLIT}
-┠ Media Group : {MEDIA_GROUP}
-┠ Leech Caption : {LCAPTION}
-┠ Leech Prefix : {LPREFIX}
-┠ Leech Suffix : {LSUFFIX}
-┠ Leech Dumps : {LDUMP}
-┖ Leech Remname : {LREMNAME}'''
+ #: The bar message
+ BAR: str = '\n┃ {Bar}'
+ #: The processed message
+ PROCESSED: str = '\n┠ Processed: {Processed}'
+ #: The status message
+ STATUS: str = '\n┠ Status: {Status}'
+ #: The eta message
+ ETA: str = ' | ETA: {Eta}'
+ #: The speed message
+ SPEED: str = '\n┠ Speed: {Speed}'
+ #: The elapsed message
+ ELAPSED: str = ' | Elapsed: {Elapsed}'
+ #: The engine message
+ ENGINE: str = '\n┠ Engine: {Engine}'
+ #: The mode message
+ STA_MODE: str = '\n┠ Mode: {Mode}'
+ #: The seeders message
+ SEEDERS: str = '\n┠ Seeders: {Seeders} | '
+ #: The leechers message
+ LEECHERS: str = 'Leechers: {Leechers}'
+
+ #####---------SEEDING-------
+ #: The seed size message
+ SEED_SIZE: str = '\n┠ Size: {Size}'
+ #: The seed speed message
+ SEED_SPEED: str = '\n┠ Speed: {Speed} | '
+ #: The uploaded message
+ UPLOADED: str = 'Uploaded: {Upload}'
+ #: The ratio message
+ RATIO: str = '\n┠ Ratio: {Ratio} | '
+ #: The time message
+    TIME: str = 'Time: {Time}'
+ #: The seed engine message
+ SEED_ENGINE: str = '\n┠ Engine: {Engine}'
+
+ #####---------NON-PROGRESSIVE + NON SEEDING-------
+ #: The status size message
+ STATUS_SIZE: str = '\n┠ Size: {Size}'
+ #: The non engine message
+ NON_ENGINE: str = '\n┠ Engine: {Engine}'
+
+ #####---------OVERALL MSG FOOTER----------
+ #: The user message
+    USER: str = '\n┠ User: {User} | '
+ #: The id message
+    ID: str = 'ID: {Id}'
+ #: The btsel message
+ BTSEL: str = '\n┠ Select: {Btsel}'
+ #: The cancel message
+ CANCEL: str = '\n┖ {Cancel}\n\n'
+
+ #: The footer message
+ FOOTER: str = '⌬ Bot Stats\n'
+ #: The tasks message
+ TASKS: str = '┠ Tasks: {Tasks}\n'
+ #: The bot tasks message
+ BOT_TASKS: str = '┠ Tasks: {Tasks}/{Ttask} | AVL: {Free}\n'
+ #: The cpu message
+ Cpu: str = '┠ CPU: {cpu}% | '
+ #: The free message
+ FREE: str = 'F: {free} [{free_p}%]'
+ #: The ram message
+ Ram: str = '\n┠ RAM: {ram}% | '
+ #: The uptime message
+ uptime: str = 'UPTIME: {uptime}'
+ #: The dl message
+ DL: str = '\n┖ DL: {DL}/s | '
+ #: The ul message
+ UL: str = 'UL: {UL}/s'
+
+ #####---------BUTTONS-------
+ #: The previous button
+ PREVIOUS: str = '⫷'
+ #: The refresh button
+ REFRESH: str = 'ᴘᴀɢᴇs\n{Page}'
+ #: The next button
+ NEXT: str = '⫸'
+ # ----------------------
+
+ # ----------------------
+ # Message styles for clone
+ # ----------------------
+
+ #: The stop duplicate message
+ STOP_DUPLICATE: str = 'File/Folder is already available in Drive.\nHere are {content} list results:'
+ # ----------------------
+
+ # ----------------------
+ # Message styles for gd_count
+ # ----------------------
+
+ #: The count msg message
+    COUNT_MSG: str = 'Counting: {LINK}'
+ #: The count name message
+ COUNT_NAME: str = '{COUNT_NAME}\n┃\n'
+ #: The count size message
+ COUNT_SIZE: str = '┠ Size: {COUNT_SIZE}\n'
+ #: The count type message
+ COUNT_TYPE: str = '┠ Type: {COUNT_TYPE}\n'
+ #: The count sub message
+ COUNT_SUB: str = '┠ SubFolders: {COUNT_SUB}\n'
+ #:
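
The WZMLStyle attributes are plain str.format templates consumed elsewhere in the bot. A small illustration of how such a template is filled, using the RESTART_SUCCESS fields from this hunk (the values below are made up):

    RESTART_SUCCESS = ('⌬ Restarted Successfully!\n'
                       '┠ Date: {date}\n┠ Time: {time}\n┠ TimeZone: {timz}\n┖ Version: {version}')

    print(RESTART_SUCCESS.format(date='01/01/2024', time='00:00:01',
                                 timz='UTC', version='v1.2.0'))
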
diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py
index 101e28787f..9c5ab87b2a 100644
--- a/bot/modules/authorize.py
+++ b/bot/modules/authorize.py
@@ -1,147 +1,95 @@
#!/usr/bin/env python3
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command, regex
+from typing import Optional
from bot import user_data, DATABASE_URL, bot
-from bot.helper.telegram_helper.message_utils import sendMessage
+from bot.helper.telegram_helper.message_utils import send_message
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.ext_utils.db_handler import DbManger
+from bot.helper.ext_utils.db_handler import DbManager
from bot.helper.ext_utils.bot_utils import update_user_ldata
-async def authorize(client, message):
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
- else:
- id_ = message.chat.id
- if id_ in user_data and user_data[id_].get('is_auth'):
- msg = 'Already Authorized!'
- else:
- update_user_ldata(id_, 'is_auth', True)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'Authorized'
- await sendMessage(message, msg)
+async def handle_authorization(context, message, is_authorize: bool) -> None:
+ """Handles user authorization or unauthorization."""
+ user_id = get_user_id(message)
+ if user_id in user_data:
+ is_already_authorized = user_data[user_id].get("is_auth")
+ if is_authorize and is_already_authorized:
+ await send_message(message, "Already Authorized!")
+ return
+ if not is_authorize and not is_already_authorized:
+ await send_message(message, "Already Unauthorized!")
+ return
+ update_user_ldata(user_id, "is_auth", is_authorize)
+ if DATABASE_URL:
+ async with DbManager() as db_manager:
+ try:
+ await db_manager.update_user_data(user_id)
+ except Exception as e:
+ await send_message(message, f"Error updating user data: {e}")
+ return
-async def unauthorize(client, message):
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
+ if is_authorize:
+ await send_message(message, "Authorized")
else:
- id_ = message.chat.id
- if id_ not in user_data or user_data[id_].get('is_auth'):
- update_user_ldata(id_, 'is_auth', False)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'Unauthorized'
- else:
- msg = 'Already Unauthorized!'
- await sendMessage(message, msg)
+ await send_message(message, "Unauthorized")
-async def addSudo(client, message):
- id_ = ""
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
- if id_:
- if id_ in user_data and user_data[id_].get('is_sudo'):
- msg = 'Already Sudo!'
- else:
- update_user_ldata(id_, 'is_sudo', True)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'Promoted as Sudo'
- else:
- msg = "Give User's ID or Reply to User's message of whom you want to Promote as Sudo"
- await sendMessage(message, msg)
+async def handle_sudo(context, message, is_add: bool) -> None:
+ """Handles adding or removing sudo users."""
+ user_id = get_user_id(message)
+ if user_id in user_data:
+ is_sudo = user_data[user_id].get("is_sudo")
+ if is_add and is_sudo:
+ await send_message(message, "Already Sudo!")
+ return
+ if not is_add and not is_sudo:
+ await send_message(message, "Not a Sudo User, Already Demoted")
+ return
+ update_user_ldata(user_id, "is_sudo", is_add)
+ if DATABASE_URL:
+ async with DbManager() as db_manager:
+ try:
+ await db_manager.update_user_data(user_id)
+ except Exception as e:
+ await send_message(message, f"Error updating user data: {e}")
+ return
-async def removeSudo(client, message):
- id_ = ""
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
- if id_:
- if id_ in user_data and not user_data[id_].get('is_sudo'):
- msg = 'Not a Sudo User, Already Demoted'
- else:
- update_user_ldata(id_, 'is_sudo', False)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'Demoted'
+ if is_add:
+ await send_message(message, "Promoted as Sudo")
else:
- msg = "Give User's ID or Reply to User's message of whom you want to Demote"
- await sendMessage(message, msg)
+ await send_message(message, "Demoted")
-async def addBlackList(_, message):
- id_ = ""
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
- if id_:
- if id_ in user_data and user_data[id_].get('is_blacklist'):
- msg = 'User Already BlackListed!'
- else:
- update_user_ldata(id_, 'is_blacklist', True)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'User BlackListed'
- else:
- msg = "Give ID or Reply To message of whom you want to blacklist."
- await sendMessage(message, msg)
+async def handle_blacklist(context, message, is_add: bool) -> None:
+ """Handles adding or removing users from the blacklist."""
+ user_id = get_user_id(message)
+ if user_id in user_data:
+ is_blacklisted = user_data[user_id].get("is_blacklist")
+ if is_add and is_blacklisted:
+ await send_message(message, "User Already BlackListed!")
+ return
+ if not is_add and not is_blacklisted:
+ await send_message(message, "User Already Freed")
+ return
+ update_user_ldata(user_id, "is_blacklist", is_add)
+ if DATABASE_URL:
+ async with DbManager() as db_manager:
+ try:
+ await db_manager.update_user_data(user_id)
+ except Exception as e:
+ await send_message(message, f"Error updating user data: {e}")
+ return
-async def rmBlackList(_, message):
- id_ = ""
- msg = message.text.split()
- if len(msg) > 1:
- id_ = int(msg[1].strip())
- elif reply_to := message.reply_to_message:
- id_ = reply_to.from_user.id
- if id_:
- if id_ in user_data and not user_data[id_].get('is_blacklist'):
- msg = 'User Already Freed'
- else:
- update_user_ldata(id_, 'is_blacklist', False)
- if DATABASE_URL:
- await DbManger().update_user_data(id_)
- msg = 'User Set Free as Bird!'
+ if is_add:
+ await send_message(message, "User BlackListed")
else:
- msg = "Give ID or Reply To message of whom you want to remove from blacklisted"
- await sendMessage(message, msg)
-
-
-async def black_listed(_, message):
- await sendMessage(message, "BlackListed Detected, Restricted from Bot")
-
-
-bot.add_handler(MessageHandler(authorize, filters=command(
- BotCommands.AuthorizeCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(unauthorize, filters=command(
- BotCommands.UnAuthorizeCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(addSudo, filters=command(
- BotCommands.AddSudoCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(removeSudo, filters=command(
- BotCommands.RmSudoCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(addBlackList, filters=command(
- BotCommands.AddBlackListCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(rmBlackList, filters=command(
- BotCommands.RmBlackListCommand) & CustomFilters.sudo))
-bot.add_handler(MessageHandler(black_listed, filters=regex(r'^/')
- & CustomFilters.authorized & CustomFilters.blacklisted))
-
\ No newline at end of file
+ await send_message(message, "User Set Free as Bird!")
+
+
+def get_user_id(message) -> int:
+    """Return the target user id: explicit argument, replied-to user, or the chat itself."""
+    msg = message.text.split()
+    if len(msg) > 1:
+        return int(msg[1].strip())
+    if reply_to := message.reply_to_message:
+        return reply_to.from_user.id
+    return message.chat.id
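
The handler registrations for the consolidated functions are not part of this hunk. A sketch of how they could be wired, reusing the commands and filters from the removed block; the thin async wrappers only pin the boolean flag:

    async def authorize(client, message):
        await handle_authorization(client, message, True)

    async def unauthorize(client, message):
        await handle_authorization(client, message, False)

    async def add_sudo(client, message):
        await handle_sudo(client, message, True)

    async def remove_sudo(client, message):
        await handle_sudo(client, message, False)

    bot.add_handler(MessageHandler(authorize, filters=command(BotCommands.AuthorizeCommand) & CustomFilters.sudo))
    bot.add_handler(MessageHandler(unauthorize, filters=command(BotCommands.UnAuthorizeCommand) & CustomFilters.sudo))
    bot.add_handler(MessageHandler(add_sudo, filters=command(BotCommands.AddSudoCommand) & CustomFilters.sudo))
    bot.add_handler(MessageHandler(remove_sudo, filters=command(BotCommands.RmSudoCommand) & CustomFilters.sudo))
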
diff --git a/bot/modules/broadcast.py b/bot/modules/broadcast.py
index ae5b074849..e4dec09c8c 100644
--- a/bot/modules/broadcast.py
+++ b/bot/modules/broadcast.py
@@ -21,22 +21,22 @@ async def broadcast(_, message):
bc_id, forwarded, quietly, deleted, edited = '', False, False, False, False
if not DATABASE_URL:
return await sendMessage(message, 'DATABASE_URL not provided!')
- rply = message.reply_to_message
- if len(message.command) > 1:
- if not message.command[1].startswith('-'):
- bc_id = message.command[1] if bc_cache.get(message.command[1], False) else ''
+ args = message.command[1:]
+ if len(args) > 0:
+ if not args[0].startswith('-'):
+ bc_id = args[0] if bc_cache.get(args[0], False) else ''
if not bc_id:
return await sendMessage(message, "Broadcast ID not found! After Restart, you can't edit or delete broadcasted messages...")
- for arg in message.command:
- if arg in ['-f', '-forward'] and rply:
+ for arg in args:
+ if arg in ['-f', '-forward'] and message.reply_to_message:
forwarded = True
- if arg in ['-q', '-quiet'] and rply:
+ if arg in ['-q', '-quiet'] and message.reply_to_message:
quietly = True
elif arg in ['-d', '-delete'] and bc_id:
deleted = True
- elif arg in ['-e', '-edit'] and bc_id and rply:
+ elif arg in ['-e', '-edit'] and bc_id and message.reply_to_message:
edited = True
- if not bc_id and not rply:
+ if not bc_id and not message.reply_to_message:
return await sendMessage(message, '''By replying to msg to Broadcast:
/broadcast bc_id -d -e -f -q
@@ -58,11 +58,10 @@ async def broadcast(_, message):
t, s, b, d, u = 0, 0, 0, 0, 0
if deleted:
temp_wait = await sendMessage(message, 'Deleting the Broadcasted Message! Please Wait ...')
- for msg in (msgs:=bc_cache[bc_id]):
+ for msg in (msgs:=bc_cache.get(bc_id, ())):
try:
await msg.delete()
await sleep(0.5)
- msgs.pop(msgs.index(msg))
s += 1
except:
u += 1
@@ -75,16 +74,16 @@ async def broadcast(_, message):
Broadcast ID: {bc_id}
''')
elif edited:
temp_wait = await sendMessage(message, 'Editing the Broadcasted Message! Please Wait ...')
- for msg in bc_cache[bc_id]:
+ for msg in bc_cache.get(bc_id, ()):
if hasattr(msg, "forward_from"):
return await editMessage(temp_wait, "Forwarded Messages can't be Edited, Only can be Deleted !")
try:
- await msg.edit(text=rply.text, entities=rply.entities, reply_markup=rply.reply_markup)
+ await msg.edit(text=message.reply_to_message.text, entities=message.reply_to_message.entities, reply_markup=message.reply_to_message.reply_markup)
await sleep(0.5)
s += 1
except FloodWait as e:
await sleep(e.value)
- await msg.edit(text=rply.text, entities=rply.entities, reply_markup=rply.reply_markup)
+ await msg.edit(text=message.reply_to_message.text, entities=message.reply_to_message.entities, reply_markup=message.reply_to_message.reply_markup)
except:
u += 1
t += 1
@@ -107,16 +106,16 @@ async def broadcast(_, message):
for uid in (await DbManger().get_pm_uids()):
try:
if forwarded:
- bc_msg = await rply.forward(uid, disable_notification=quietly)
+ bc_msg = await message.reply_to_message.forward(uid, disable_notification=quietly)
else:
- bc_msg = await rply.copy(uid, disable_notification=quietly)
+ bc_msg = await message.reply_to_message.copy(uid, disable_notification=quietly)
s += 1
except FloodWait as e:
await sleep(e.value)
if forwarded:
- bc_msg = await rply.forward(uid, disable_notification=quietly)
+ bc_msg = await message.reply_to_message.forward(uid, disable_notification=quietly)
else:
- bc_msg = await rply.copy(uid, disable_notification=quietly)
+ bc_msg = await message.reply_to_message.copy(uid, disable_notification=quietly)
s += 1
except UserIsBlocked:
await DbManger().rm_pm_user(uid)
@@ -136,4 +135,4 @@ async def broadcast(_, message):
        await editMessage(pls_wait, status.format(**locals()) + f"\n\nElapsed Time: {get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}")
-bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo))
\ No newline at end of file
+bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo))
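
The broadcast loop repeats the same sleep-and-retry handling of FloodWait for the copy and forward branches. Reduced to its core, the pattern is the sketch below (illustration only, not part of the module):

    from asyncio import sleep
    from pyrogram.errors import FloodWait

    async def send_with_floodwait(coro_factory):
        """Run a Pyrogram call once, sleeping and retrying a single time if rate-limited."""
        try:
            return await coro_factory()
        except FloodWait as e:
            await sleep(e.value)
            return await coro_factory()

    # Usage mirroring the branches above:
    #     bc_msg = await send_with_floodwait(lambda: message.reply_to_message.copy(uid, disable_notification=quietly))
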
diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py
index cbfef3e830..9058969f8c 100644
--- a/bot/modules/cancel_mirror.py
+++ b/bot/modules/cancel_mirror.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-from asyncio import sleep
+import asyncio
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex
@@ -7,49 +7,40 @@
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, auto_delete_message
+from bot.helper.telegram_helper.message_utils import send_message, delete_message, auto_delete_message
-from bot.helper.ext_utils.bot_utils import getDownloadByGid, getAllDownload, MirrorStatus, new_task
+from bot.helper.ext_utils.bot_utils import get_download_by_gid, get_all_downloads, MirrorStatus, new_task
from bot.helper.telegram_helper import button_build
async def cancel_mirror(_, message):
user_id = message.from_user.id
- msg = message.text.split('_', maxsplit=1)
- if len(msg) > 1:
- cmd_data = msg[1].split('@', maxsplit=1)
- if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name:
- return
- gid = cmd_data[0]
- dl = await getDownloadByGid(gid)
- if dl is None:
- await sendMessage(message, f"GID: {gid}
Not Found.")
- return
- elif reply_to_id := message.reply_to_message_id:
- async with download_dict_lock:
- dl = download_dict.get(reply_to_id, None)
- if dl is None:
- await sendMessage(message, "This is not an active task!")
- return
- elif len(msg) == 1:
- msg = "Reply to an active Command message which was used to start the download" \
- f" or send /{BotCommands.CancelMirror}_GID
to cancel it!"
- await sendMessage(message, msg)
- return
- if OWNER_ID != user_id and dl.message.from_user.id != user_id and \
- (user_id not in user_data or not user_data[user_id].get('is_sudo')):
- await sendMessage(message, "This task is not for you!")
+ args = message.text.split('_', maxsplit=1)
+ if len(args) > 1:
+        gid, _, cmd_name = args[1].partition('@')
+ else:
+ return await send_message(message, "Invalid format. Use /cancel_mirror_gid_botname or reply to an active task.")
+
+    if cmd_name and cmd_name.strip() != bot_name:
return
- obj = dl.download()
- await obj.cancel_download()
+
+ download_info = await get_download_by_gid(gid)
+ if not download_info:
+ return await send_message(message, f"GID: `{gid}` Not Found.")
+
+    if user_id != OWNER_ID and download_info.message.from_user.id != user_id and (user_id not in user_data or not user_data[user_id].get('is_sudo')):
+ return await send_message(message, "This task is not for you!")
+
+    await download_info.download().cancel_download()
async def cancel_all(status):
- matches = await getAllDownload(status)
+ matches = await get_all_downloads(status)
if not matches:
return False
- for dl in matches:
- obj = dl.download()
- await obj.cancel_download()
- await sleep(1)
+
+ for download_info in matches:
+        await download_info.download().cancel_download()
+ await asyncio.sleep(1)
+
return True
@@ -57,8 +48,8 @@ async def cancell_all_buttons(_, message):
async with download_dict_lock:
count = len(download_dict)
if count == 0:
- await sendMessage(message, "No active tasks!")
- return
+ return await send_message(message, "No active tasks!")
+
buttons = button_build.ButtonMaker()
buttons.ibutton("Downloading", f"canall {MirrorStatus.STATUS_DOWNLOADING}")
buttons.ibutton("Uploading", f"canall {MirrorStatus.STATUS_UPLOADING}")
@@ -72,7 +63,7 @@ async def cancell_all_buttons(_, message):
buttons.ibutton("All", "canall all")
buttons.ibutton("Close", "canall close")
button = buttons.build_menu(2)
- can_msg = await sendMessage(message, 'Choose tasks to cancel.', button)
+ can_msg = await send_message(message, 'Choose tasks to cancel.', button)
await auto_delete_message(message, can_msg)
@@ -83,12 +74,12 @@ async def cancel_all_update(_, query):
reply_to = message.reply_to_message
await query.answer()
if data[1] == 'close':
- await deleteMessage(reply_to)
- await deleteMessage(message)
+ await delete_message(reply_to)
+ await delete_message(message)
else:
res = await cancel_all(data[1])
if not res:
- await sendMessage(reply_to, f"No matching tasks for {data[1]}!")
+ await send_message(reply_to, f"No matching tasks for {data[1]}!")
bot.add_handler(MessageHandler(cancel_mirror, filters=regex(
diff --git a/bot/modules/category_select.py b/bot/modules/category_select.py
index a4d54714bc..0e47c866c0 100644
--- a/bot/modules/category_select.py
+++ b/bot/modules/category_select.py
@@ -1,91 +1,75 @@
#!/usr/bin/env python3
+import re
+from time import time
+
from pyrogram.filters import command, regex
from pyrogram.handlers import CallbackQueryHandler, MessageHandler
-from time import time
+from pyrogram.types import CallbackQuery
from bot import bot, bot_cache, categories_dict, download_dict, download_dict_lock
-from bot.helper.ext_utils.bot_utils import MirrorStatus, arg_parser, fetch_user_tds, fetch_user_dumps, getDownloadByGid, is_gdrive_link, new_task, sync_to_async, get_readable_time
+from bot.helper.ext_utils.bot_utils import MirrorStatus, arg_parser, fetch_user_tds, fetch_user_dumps, get_download_by_gid, is_gdrive_link, new_task, sync_to_async, get_readable_time
from bot.helper.ext_utils.help_messages import CATEGORY_HELP_MESSAGE
+from bot.helper.telegram_helper.message_utils import edit_message, send_message, open_category_btns
from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
-from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.button_build import ButtonMaker
from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.message_utils import editMessage, sendMessage, open_category_btns
async def change_category(client, message):
- if not message.from_user:
- return
user_id = message.from_user.id
-
- text = message.text.split('\n')
- input_list = text[0].split(' ')
-
- arg_base = {'link': '',
- '-id': '',
- '-index': ''}
-
- args = arg_parser(input_list[1:], arg_base)
-
+ args = arg_parser(message.text.split()[1:], {'link': '', '-id': '', '-index': ''})
+ gid = args['link']
drive_id = args['-id']
index_link = args['-index']
- if drive_id and is_gdrive_link(drive_id):
- drive_id = GoogleDriveHelper.getIdFromUrl(drive_id)
-
- dl = None
- if gid := args['link']:
- dl = await getDownloadByGid(gid)
- if not dl:
-            await sendMessage(message, f"GID: {gid} Not Found.")
+    dl = await get_download_by_gid(gid) if gid else None
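+    # No GID given (or no match found): fall back to the task tied to the replied-to message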
+ if dl is None:
+ reply_dl = download_dict.get(message.reply_to_message.id)
+ if reply_dl is None:
+ await send_message(message, CATEGORY_HELP_MESSAGE)
return
- if reply_to := message.reply_to_message:
- async with download_dict_lock:
- dl = download_dict.get(reply_to.id, None)
- if not dl:
- await sendMessage(message, "This is not an active task!")
- return
- if not dl:
- await sendMessage(message, CATEGORY_HELP_MESSAGE)
- return
- if not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
- await sendMessage(message, "This task is not for you!")
+ dl = reply_dl
+
+ if dl and dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUED]:
+ await send_message(message, f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUED}')
return
- if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
- await sendMessage(message, f'Task should be on {MirrorStatus.STATUS_DOWNLOADING} or {MirrorStatus.STATUS_PAUSED} or {MirrorStatus.STATUS_QUEUEDL}')
+
+ if dl and not await CustomFilters.sudo(client, message) and dl.message.from_user.id != user_id:
+ await send_message(message, "This task is not for you!")
return
- listener = dl.listener() if dl and hasattr(dl, 'listener') else None
- if listener and not listener.isLeech:
+
+ if dl and not dl.listener.isLeech:
if not index_link and not drive_id and categories_dict:
drive_id, index_link, is_cancelled = await open_category_btns(message)
if is_cancelled:
return
if not index_link and not drive_id:
- return await sendMessage(message, "Time out")
+ return await send_message(message, "Time out")
msg = 'Task has been Updated Successfully!'
if drive_id:
- if not (folder_name := await sync_to_async(GoogleDriveHelper().getFolderData, drive_id)):
- return await sendMessage(message, "Google Drive id validation failed!!")
- if listener.drive_id and listener.drive_id == drive_id:
+ if not (folder_name := await sync_to_async(GoogleDriveHelper().get_folder_data, drive_id)):
+ return await send_message(message, "Google Drive id validation failed!!")
+ if dl.listener.drive_id and dl.listener.drive_id == drive_id:
msg += f'\n\nFolder name : {folder_name} Already selected'
else:
msg += f'\n\nFolder name : {folder_name}'
- listener.drive_id = drive_id
+ dl.listener.drive_id = drive_id
if index_link:
- listener.index_link = index_link
+ dl.listener.index_link = index_link
msg += f'\n\nIndex Link : {index_link}
'
- return await sendMessage(message, msg)
+ return await send_message(message, msg)
else:
- await sendMessage(message, "Can not change Category for this task!")
+ await send_message(message, "Can not change Category for this task!")
@new_task
-async def confirm_category(client, query):
+async def confirm_category(client, query: CallbackQuery):
user_id = query.from_user.id
- data = query.data.split(maxsplit=3)
+ data = re.split(r'\s+', query.data)
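+    # Callback data layout: "scat <user_id> <msg_id> <category|sdone|scancel>"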
msg_id = int(data[2])
if msg_id not in bot_cache:
- return await editMessage(query.message, 'Old Task')
+ return await edit_message(query.message, 'Old Task')
elif user_id != int(data[1]) and not await CustomFilters.sudo(client, query):
return await query.answer(text="This task is not for you!", show_alert=True)
elif data[3] == "sdone":
@@ -109,16 +93,16 @@ async def confirm_category(client, query):
buttons.ibutton(f'{"✅️" if cat_name == _name else ""} {_name}', f"scat {user_id} {msg_id} {_name.replace(' ', '_')}")
buttons.ibutton('Cancel', f'scat {user_id} {msg_id} scancel', 'footer')
buttons.ibutton(f'Done ({get_readable_time(60 - (time() - bot_cache[msg_id][4]))})', f'scat {user_id} {msg_id} sdone', 'footer')
-        await editMessage(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3))
+        await edit_message(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3))
@new_task
-async def confirm_dump(client, query):
+async def confirm_dump(client, query: CallbackQuery):
user_id = query.from_user.id
- data = query.data.split(maxsplit=3)
+ data = re.split(r'\s+', query.data)
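+    # Callback data layout: "dcat <user_id> <msg_id> <dump|ddone|dcancel>"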
msg_id = int(data[2])
if msg_id not in bot_cache:
- return await editMessage(query.message, 'Old Task')
+ return await edit_message(query.message, 'Old Task')
elif user_id != int(data[1]) and not await CustomFilters.sudo(client, query):
return await query.answer(text="This task is not for you!", show_alert=True)
elif data[3] == "ddone":
@@ -139,9 +123,9 @@ async def confirm_dump(client, query):
buttons.ibutton('Upload in All', f'dcat {user_id} {msg_id} All', 'header')
buttons.ibutton('Cancel', f'dcat {user_id} {msg_id} dcancel', 'footer')
buttons.ibutton(f'Done ({get_readable_time(60 - (time() - bot_cache[msg_id][3]))})', f'dcat {user_id} {msg_id} ddone', 'footer')
-        await editMessage(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3))
+        await edit_message(query.message, f"Select the category where you want to upload\n\nUpload Category: {cat_name}\n\nTimeout: 60 sec", buttons.build_menu(3))
bot.add_handler(MessageHandler(change_category, filters=command(BotCommands.CategorySelect) & CustomFilters.authorized))
bot.add_handler(CallbackQueryHandler(confirm_category, filters=regex("^scat")))
-bot.add_handler(CallbackQueryHandler(confirm_dump, filters=regex("^dcat")))
\ No newline at end of file
+bot.add_handler(CallbackQueryHandler(confirm_dump, filters=regex("^dcat")))
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
index 391ca25655..2f4b8c59cf 100644
--- a/bot/modules/clone.py
+++ b/bot/modules/clone.py
@@ -1,293 +1,610 @@
#!/usr/bin/env python3
+import os
+import sys
+import asyncio
+import json
+import random
+import re
+import shutil
+from urllib.parse import urlparse
+from functools import lru_cache
+from typing import List, Dict, Union, Tuple, Optional
+import aiofiles
+import aiohttp
+import pyrogram
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
-from random import SystemRandom
-from string import ascii_letters, digits
-from asyncio import sleep, gather
-from aiofiles.os import path as aiopath
-from cloudscraper import create_scraper as cget
-from json import loads, dumps as jdumps
+from pyrogram.types import Message, InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery
+from pyrogram.errors import FloodWait, UserIsBlocked, MessageNotModified, MessageEmpty, MessageIdInvalid, ChatAdminRequired
+from cloudscraper import create_scraper
+from gdown import download as gdown_download
+from gdown import GDriveFileTransferError
+from gdown.download import DownloadError
+from gdown.gdrive import GDriveFile
+from gdown.service import ServiceException
+from gdown.service import ServiceUnavailable
-from bot import LOGGER, download_dict, download_dict_lock, categories_dict, config_dict, bot
-from bot.helper.ext_utils.task_manager import limit_checker, task_utils
-from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
-from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, sendStatusMessage, delete_links, auto_delete_message, open_category_btns
-from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.telegram_helper.button_build import ButtonMaker
-from bot.helper.mirror_utils.status_utils.gdrive_status import GdriveStatus
-from bot.helper.ext_utils.bot_utils import is_gdrive_link, new_task, get_readable_file_size, sync_to_async, fetch_user_tds, is_share_link, new_task, is_rclone_path, cmd_exec, get_telegraph_list, arg_parser
-from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
-from bot.helper.mirror_utils.download_utils.direct_link_generator import direct_link_generator
-from bot.helper.mirror_utils.rclone_utils.list import RcloneList
-from bot.helper.mirror_utils.rclone_utils.transfer import RcloneTransferHelper
-from bot.helper.ext_utils.help_messages import CLONE_HELP_MESSAGE
-from bot.helper.mirror_utils.status_utils.rclone_status import RcloneStatus
-from bot.helper.listeners.tasks_listener import MirrorLeechListener
-from bot.helper.themes import BotTheme
-
-
-async def rcloneNode(client, message, link, dst_path, rcf, tag):
- if link == 'rcl':
- link = await RcloneList(client, message).get_rclone_path('rcd')
- if not is_rclone_path(link):
- await sendMessage(message, link)
- return
-
- if link.startswith('mrcc:'):
- link = link.split('mrcc:', 1)[1]
- config_path = f'rclone/{message.from_user.id}.conf'
- else:
- config_path = 'rclone.conf'
-
- if not await aiopath.exists(config_path):
- await sendMessage(message, f"RClone Config: {config_path} not Exists!")
- return
-
- if dst_path == 'rcl' or config_dict['RCLONE_PATH'] == 'rcl':
- dst_path = await RcloneList(client, message).get_rclone_path('rcu', config_path)
- if not is_rclone_path(dst_path):
- await sendMessage(message, dst_path)
- return
-
- dst_path = (dst_path or config_dict['RCLONE_PATH']).strip('/')
- if not is_rclone_path(dst_path):
- await sendMessage(message, 'Given Wrong RClone Destination!')
- return
- if dst_path.startswith('mrcc:'):
- if config_path != f'rclone/{message.from_user.id}.conf':
- await sendMessage(message, 'You should use same rclone.conf to clone between paths!')
- return
- elif config_path != 'rclone.conf':
- await sendMessage(message, 'You should use same rclone.conf to clone between paths!')
- return
-
- remote, src_path = link.split(':', 1)
- src_path = src_path .strip('/')
-
- cmd = ['rclone', 'lsjson', '--fast-list', '--stat',
- '--no-modtime', '--config', config_path, f'{remote}:{src_path}']
- res = await cmd_exec(cmd)
- if res[2] != 0:
- if res[2] != -9:
- msg = f'Error: While getting RClone Stats. Path: {remote}:{src_path}. Stderr: {res[1][:4000]}'
- await sendMessage(message, msg)
- return
- rstat = loads(res[0])
- if rstat['IsDir']:
- name = src_path.rsplit('/', 1)[-1] if src_path else remote
- dst_path += name if dst_path.endswith(':') else f'/{name}'
- mime_type = 'Folder'
- else:
- name = src_path.rsplit('/', 1)[-1]
- mime_type = rstat['MimeType']
-
- listener = MirrorLeechListener(message, tag=tag, source_url=link)
- await listener.onDownloadStart()
-
- RCTransfer = RcloneTransferHelper(listener, name)
- LOGGER.info(f'Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}')
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
- async with download_dict_lock:
- download_dict[message.id] = RcloneStatus(
- RCTransfer, message, gid, 'cl', listener.upload_details)
- await sendStatusMessage(message)
- link, destination = await RCTransfer.clone(config_path, remote, src_path, dst_path, rcf, mime_type)
- if not link:
- return
- LOGGER.info(f'Cloning Done: {name}')
- cmd1 = ['rclone', 'lsf', '--fast-list', '-R',
- '--files-only', '--config', config_path, destination]
- cmd2 = ['rclone', 'lsf', '--fast-list', '-R',
- '--dirs-only', '--config', config_path, destination]
- cmd3 = ['rclone', 'size', '--fast-list', '--json',
- '--config', config_path, destination]
- res1, res2, res3 = await gather(cmd_exec(cmd1), cmd_exec(cmd2), cmd_exec(cmd3))
- if res1[2] != res2[2] != res3[2] != 0:
- if res1[2] == -9:
- return
- files = None
- folders = None
- size = 0
- LOGGER.error(f'Error: While getting RClone Stats. Path: {destination}. Stderr: {res1[1][:4000]}')
- else:
- files = len(res1[0].split("\n"))
- folders = len(res2[0].split("\n"))
- rsize = loads(res3[0])
- size = rsize['bytes']
- await listener.onUploadComplete(link, size, files, folders, mime_type, name, destination)
-
-
-async def gdcloneNode(message, link, listen_up):
- org_link = None
- if not is_gdrive_link(link) and is_share_link(link):
- org_link = link
- process_msg = await sendMessage(message, f"Processing Link: {link}
")
- try:
- link = await sync_to_async(direct_link_generator, link)
- LOGGER.info(f"Generated link: {link}")
- await editMessage(process_msg, f"Generated Link: {link}
")
- except DirectDownloadLinkException as e:
- LOGGER.error(str(e))
- if str(e).startswith('ERROR:'):
- await editMessage(process_msg, str(e))
- return
- await deleteMessage(process_msg)
- if is_gdrive_link(link):
- gd = GoogleDriveHelper()
- name, mime_type, size, files, _ = await sync_to_async(gd.count, link)
- if org_link:
- cget().request('POST', "https://wzmlcontribute.vercel.app/contribute", headers={"Content-Type": "application/json"}, data=jdumps({"name": name, "link": org_link, "size": get_readable_file_size(size)}))
- if mime_type is None:
- await sendMessage(message, name)
- return
- if config_dict['STOP_DUPLICATE']:
- LOGGER.info('Checking File/Folder if already in Drive...')
- telegraph_content, contents_no = await sync_to_async(gd.drive_list, name, True, True)
- if telegraph_content:
- msg = BotTheme('STOP_DUPLICATE', content=contents_no)
- button = await get_telegraph_list(telegraph_content)
- await sendMessage(message, msg, button)
- return
- listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], source_url=org_link if org_link else link)
- if limit_exceeded := await limit_checker(size, listener):
- await sendMessage(listener.message, limit_exceeded)
- return
- await listener.onDownloadStart()
- LOGGER.info(f'Clone Started: Name: {name} - Source: {link}')
- drive = GoogleDriveHelper(name, listener=listener)
- if files <= 20:
- msg = await sendMessage(message, f"Cloning: {link}
")
- link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id)
- await deleteMessage(msg)
- else:
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
- async with download_dict_lock:
- download_dict[message.id] = GdriveStatus(
- drive, size, message, gid, 'cl', listener.upload_details)
- await sendStatusMessage(message)
- link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id)
- if not link:
- return
- LOGGER.info(f'Cloning Done: {name}')
- await listener.onUploadComplete(link, size, files, folders, mime_type, name)
- else:
- btn = ButtonMaker()
- btn.ibutton('Click Here to Read More ..', f'wzmlx {message.from_user.id} help CLONE')
- reply_message = await sendMessage(message, CLONE_HELP_MESSAGE[0], btn.build_menu(1))
- await auto_delete_message(message, reply_message)
-
-
-@new_task
-async def clone(client, message):
- input_list = message.text.split(' ')
-
- arg_base = {'link': '',
- '-i': 0,
- '-up': '', '-upload': '',
- '-rcf': '',
- '-id': '',
- '-index': '',
- '-c': '', '-category': '',
- }
-
- args = arg_parser(input_list[1:], arg_base)
-
- try:
- multi = int(args['-i'])
- except:
- multi = 0
-
- dst_path = args['-up'] or args['-upload']
- rcf = args['-rcf']
- link = args['link']
- drive_id = args['-id']
- index_link = args['-index']
- gd_cat = args['-c'] or args['-category']
-
- if username := message.from_user.username:
- tag = f"@{username}"
- else:
- tag = message.from_user.mention
-
- if not link and (reply_to := message.reply_to_message) and reply_to.text:
- link = reply_to.text.split('\n', 1)[0].strip()
-
- @new_task
- async def __run_multi():
- if multi > 1:
- await sleep(5)
- msg = [s.strip() for s in input_list]
- index = msg.index('-i')
- msg[index+1] = f"{multi - 1}"
- nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1)
- nextmsg = await sendMessage(nextmsg, " ".join(msg))
- nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id)
- nextmsg.from_user = message.from_user
- await sleep(5)
- clone(client, nextmsg)
-
- __run_multi()
-
- if drive_id and is_gdrive_link(drive_id):
- drive_id = GoogleDriveHelper.getIdFromUrl(drive_id)
-
- if len(link) == 0:
- btn = ButtonMaker()
- btn.ibutton('Cʟɪᴄᴋ Hᴇʀᴇ Tᴏ Rᴇᴀᴅ Mᴏʀᴇ ...', f'wzmlx {message.from_user.id} help CLONE')
- await sendMessage(message, CLONE_HELP_MESSAGE[0], btn.build_menu(1))
- await delete_links(message)
- return
-
- error_msg = []
- error_button = None
- task_utilis_msg, error_button = await task_utils(message)
- if task_utilis_msg:
- error_msg.extend(task_utilis_msg)
-
- if error_msg:
- final_msg = f'User : {tag}\n'
- for __i, __msg in enumerate(error_msg, 1):
- final_msg += f'\n{__i}: {__msg}\n'
- if error_button is not None:
- error_button = error_button.build_menu(2)
- await sendMessage(message, final_msg, error_button)
- await delete_links(message)
- return
-
- if is_rclone_path(link):
- if not await aiopath.exists('rclone.conf') and not await aiopath.exists(f'rclone/{message.from_user.id}.conf'):
- await sendMessage(message, 'RClone Config Not exists!')
- await delete_links(message)
- return
- if not config_dict['RCLONE_PATH'] and not dst_path:
- await sendMessage(message, 'Destination not specified!')
- await delete_links(message)
- return
- await rcloneNode(client, message, link, dst_path, rcf, tag)
- else:
- user_tds = await fetch_user_tds(message.from_user.id)
- if not drive_id and gd_cat:
- merged_dict = {**categories_dict, **user_tds}
- for drive_name, drive_dict in merged_dict.items():
- if drive_name.casefold() == gd_cat.replace('_', ' ').casefold():
- drive_id, index_link = (drive_dict['drive_id'], drive_dict['index_link'])
- break
- if not drive_id and len(user_tds) == 1:
- drive_id, index_link = next(iter(user_tds.values())).values()
- elif not drive_id and (len(categories_dict) > 1 and len(user_tds) == 0 or len(categories_dict) >= 1 and len(user_tds) > 1):
- drive_id, index_link, is_cancelled = await open_category_btns(message)
- if is_cancelled:
- await delete_links(message)
- return
- if drive_id and not await sync_to_async(GoogleDriveHelper().getFolderData, drive_id):
- return await sendMessage(message, "Google Drive ID validation failed!!")
- if not config_dict['GDRIVE_ID'] and not drive_id:
- await sendMessage(message, 'GDRIVE_ID not Provided!')
- await delete_links(message)
- return
- await gdcloneNode(message, link, [tag, drive_id, index_link])
- await delete_links(message)
-
-bot.add_handler(MessageHandler(clone, filters=command(
- BotCommands.CloneCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/gd_count.py b/bot/modules/gd_count.py
index e790f1a8f6..c651e047ba 100644
--- a/bot/modules/gd_count.py
+++ b/bot/modules/gd_count.py
@@ -1,47 +1,72 @@
#!/usr/bin/env python3
+import asyncio
+from functools import wraps
+from typing import Callable, Coroutine
+
+import pyrogram
+from pyrogram.errors import UserIsBlocked, MessageNotModified
from pyrogram.handlers import MessageHandler
-from pyrogram.filters import command
+from pyrogram.filters import command, regex
from bot import bot
from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
-from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage
+from bot.helper.telegram_helper.message_utils import deleteMessage, sendMessage, sendPhoto
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.ext_utils.bot_utils import is_gdrive_link, sync_to_async, new_task, get_readable_file_size
+from bot.helper.ext_utils.bot_utils import is_gdrive_link, human_readable
from bot.helper.themes import BotTheme
-@new_task
-async def countNode(_, message):
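+# Decorator that sends a 'typing' chat action before running the wrapped handler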
+def send_typing_action(func: Callable[..., Coroutine]):
+    @wraps(func)
+    async def wrapper(client, message: pyrogram.types.Message, *args, **kwargs):
+        await bot.send_chat_action(message.chat.id, "typing")
+        return await func(client, message, *args, **kwargs)
+
+    return wrapper
+
+
+@send_typing_action
+async def count_node(client, message):
args = message.text.split()
- if username := message.from_user.username:
- tag = f"@{username}"
- else:
- tag = message.from_user.mention
+ username = message.from_user.username
+ tag = f"@{username}" if username else message.from_user.mention
- link = args[1] if len(args) > 1 else ''
- if len(link) == 0 and (reply_to := message.reply_to_message):
- link = reply_to.text.split(maxsplit=1)[0].strip()
+ link = args[1] if len(args) > 1 else None
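+    # No link argument given: try to pick it up from the replied-to message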
+ if not link:
+ reply_to = message.reply_to_message
+ if reply_to:
+ link = reply_to.text.split(maxsplit=1)[0].strip()
if is_gdrive_link(link):
+ try:
+ await deleteMessage(message)
+ except MessageNotModified:
+ pass
+
msg = await sendMessage(message, BotTheme('COUNT_MSG', LINK=link))
gd = GoogleDriveHelper()
- name, mime_type, size, files, folders = await sync_to_async(gd.count, link)
+ name, mime_type, size, files, folders = await gd.count(link)
+
if mime_type is None:
await sendMessage(message, name)
return
+
await deleteMessage(msg)
+
msg = BotTheme('COUNT_NAME', COUNT_NAME=name)
- msg += BotTheme('COUNT_SIZE', COUNT_SIZE=get_readable_file_size(size))
+ msg += BotTheme('COUNT_SIZE', COUNT_SIZE=human_readable(size))
msg += BotTheme('COUNT_TYPE', COUNT_TYPE=mime_type)
+
if mime_type == 'Folder':
msg += BotTheme('COUNT_SUB', COUNT_SUB=folders)
msg += BotTheme('COUNT_FILE', COUNT_FILE=files)
+
msg += BotTheme('COUNT_CC', COUNT_CC=tag)
+ await sendPhoto(message, msg, 'IMAGES')
else:
- msg = 'Send Gdrive link along with command or by replying to the link by command'
- await sendMessage(message, msg, photo='IMAGES')
+ await sendMessage(message, 'Send Gdrive link along with command or by replying to the link by command',
+ photo='IMAGES')
-bot.add_handler(MessageHandler(countNode, filters=command(
- BotCommands.CountCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+bot.add_handler(MessageHandler(count_node, filters=command(BotCommands.CountCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/gd_list.py b/bot/modules/gd_list.py
index d22cf8b64a..4c9bedb8db 100644
--- a/bot/modules/gd_list.py
+++ b/bot/modules/gd_list.py
@@ -1,7 +1,10 @@
#!/usr/bin/env python3
-from random import choice
+from typing import List, Union
+
+import asyncio
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex
+from pyrogram.errors import FloodWait
from bot import LOGGER, bot, config_dict
from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
@@ -12,55 +15,58 @@
from bot.helper.ext_utils.bot_utils import sync_to_async, new_task, get_telegraph_list, checking_access
from bot.helper.themes import BotTheme
-
-async def list_buttons(user_id, isRecursive=True):
+async def list_buttons(user_id: int, is_recursive: bool = True) -> List[List[str]]:
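+    # Inline keyboard for choosing what to list (folders, files or both), toggling recursion, or cancelling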
buttons = ButtonMaker()
- buttons.ibutton("Only Folders", f"list_types {user_id} folders {isRecursive}")
- buttons.ibutton("Only Files", f"list_types {user_id} files {isRecursive}")
- buttons.ibutton("Both", f"list_types {user_id} both {isRecursive}")
- buttons.ibutton(f"{'✅️' if isRecursive else ''} Recursive", f"list_types {user_id} rec {isRecursive}")
+ buttons.ibutton("Only Folders", f"list_types {user_id} folders {is_recursive}")
+ buttons.ibutton("Only Files", f"list_types {user_id} files {is_recursive}")
+ buttons.ibutton("Both", f"list_types {user_id} both {is_recursive}")
+ buttons.ibutton(f"{'✅️' if is_recursive else ''} Recursive", f"list_types {user_id} rec {is_recursive}")
buttons.ibutton("Cancel", f"list_types {user_id} cancel")
return buttons.build_menu(2)
-
-async def _list_drive(key, message, user_id, item_type, isRecursive):
+async def _list_drive(key: str, message, user_id: int, item_type: str, is_recursive: bool):
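+    # Search Drive for the given key and edit the message with a Telegraph list of matches (or a not-found notice)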
LOGGER.info(f"GDrive List: {key}")
gdrive = GoogleDriveHelper()
- telegraph_content, contents_no = await sync_to_async(gdrive.drive_list, key, isRecursive=isRecursive, itemType=item_type, userId=user_id)
+ try:
+ async with gdrive:
+ telegraph_content, contents_no = await sync_to_async(gdrive.drive_list, key, is_recursive=is_recursive, itemType=item_type, userId=user_id)
+ except Exception as e:
+ LOGGER.error(e)
+ await editMessage(message, "An error occurred while listing the drive.")
+ return
+
if telegraph_content:
try:
button = await get_telegraph_list(telegraph_content)
except Exception as e:
- await editMessage(message, e)
+ await editMessage(message, str(e))
return
msg = BotTheme('LIST_FOUND', NO=contents_no, NAME=key)
await editMessage(message, msg, button)
else:
await editMessage(message, BotTheme('LIST_NOT_FOUND', NAME=key))
-
-@new_task
async def select_type(_, query):
user_id = query.from_user.id
message = query.message
key = message.reply_to_message.text.split(maxsplit=1)[1].strip()
data = query.data.split()
if user_id != int(data[1]):
- return await query.answer(text="Not Yours!", show_alert=True)
+ await query.answer(text="Not Yours!", show_alert=True)
+ return
elif data[2] == 'rec':
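+        # Toggle the recursive flag and redraw the option buttons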
+ is_recursive = not bool(eval(data[3]))
+ buttons = await list_buttons(user_id, is_recursive)
await query.answer()
- isRecursive = not bool(eval(data[3]))
- buttons = await list_buttons(user_id, isRecursive)
return await editMessage(message, 'Choose drive list options:', buttons)
elif data[2] == 'cancel':
await query.answer()
return await editMessage(message, "List has been canceled!")
await query.answer()
item_type = data[2]
- isRecursive = eval(data[3])
+ is_recursive = eval(data[3])
await editMessage(message, BotTheme('LIST_SEARCHING', NAME=key))
- await _list_drive(key, message, user_id, item_type, isRecursive)
-
+ await _list_drive(key, message, user_id, item_type, is_recursive)
async def drive_list(_, message):
args = message.text.split() if message.text else ['/cmd']
diff --git a/bot/modules/gen_pyro_sess.py b/bot/modules/gen_pyro_sess.py
index 89e1ecfadd..6c690ecc09 100644
--- a/bot/modules/gen_pyro_sess.py
+++ b/bot/modules/gen_pyro_sess.py
@@ -1,13 +1,16 @@
#!/usr/bin/env python3
from time import time
+import asyncio
from aiofiles.os import remove as aioremove
-from asyncio import sleep, wrap_future, Lock
+from asyncio import wrap_future, Lock
from functools import partial
-from pyrogram import Client
-from pyrogram.filters import command, user, text, private
-from pyrogram.handlers import MessageHandler
-from pyrogram.errors import SessionPasswordNeeded, FloodWait, PhoneNumberInvalid, ApiIdInvalid, PhoneCodeInvalid, PhoneCodeExpired, UsernameNotOccupied, ChatAdminRequired, PeerIdInvalid
+import aiogram
+from aiogram.types import Message
+from aiogram.filters import command, user, text, private
+from aiogram.handlers import MessageHandler
+from aiogram.errors import InputUserDeactivated, ChatAdminRequired, PeerIdInvalid
+from aiogram.utils.exceptions import Throttled, CantParseEntities, MessageCantBeEdited, MessageToEditNotFound, MessageNotModified, TelegramAPIError, NetworkError, RetryAfter, CantParseMessage, InvalidQueryID, CantParseParam, CantParseHTTPURL, CantParsePhoneNumber, CantParseEmailAddress, CantParseUsername, CantParseHash, CantParseVersion
from bot import bot, LOGGER
from bot.helper.ext_utils.bot_utils import new_thread, new_task
@@ -18,19 +21,18 @@
session_lock = Lock()
isStop = False
-@new_task
-async def genPyroString(client, message):
+async def genPyroString(message: Message):
global isStop
session_dict.clear()
sess_msg = await sendMessage(message, """⌬ Pyrogram String Session Generator
Send your API_ID or APP_ID.
-Get from https://my.telegram.org.
+Get from https://my.telegram.org.
Timeout: 120s
Send /stop to Stop Process""")
session_dict['message'] = sess_msg
- await wrap_future(invoke(client, message, 'API_ID'))
+ await wrap_future(invoke(message, 'API_ID'))
if isStop:
return
async with session_lock:
@@ -38,14 +40,14 @@ async def genPyroString(client, message):
api_id = int(session_dict['API_ID'])
except Exception:
return await editMessage(sess_msg, "APP_ID
is Invalid.\n\n ⌬ Process Stopped.")
- await sleep(1.5)
+ await asyncio.sleep(1.5)
await editMessage(sess_msg, """⌬ Pyrogram String Session Generator
-Send your API_HASH. Get from https://my.telegram.org.
+Send your API_HASH. Get from https://my.telegram.org.
Timeout: 120s
Send /stop to Stop Process""")
- await wrap_future(invoke(client, message, 'API_HASH'))
+ await wrap_future(invoke(message, 'API_HASH'))
if isStop:
return
async with session_lock:
@@ -53,18 +55,18 @@ async def genPyroString(client, message):
if len(api_hash) <= 30:
return await editMessage(sess_msg, "API_HASH
is Invalid.\n\n ⌬ Process Stopped.")
while True:
- await sleep(1.5)
+ await asyncio.sleep(1.5)
await editMessage(sess_msg, """⌬ Pyrogram String Session Generator
Send your Telegram Account's Phone number in International Format ( Including Country Code ). Example : +14154566376.
Timeout: 120s
Send /stop to Stop Process""")
- await wrap_future(invoke(client, message, 'PHONE_NO'))
+ await wrap_future(invoke(message, 'PHONE_NO'))
if isStop:
return
await editMessage(sess_msg, f"⌬ Verification Confirmation:\n\n Is {session_dict['PHONE_NO']} correct? (y/n/yes/no): \n\nSend y/yes (Yes) | n/no (No)")
- await wrap_future(invoke(client, message, 'CONFIRM_PHN'))
+ await wrap_future(invoke(message, 'CONFIRM_PHN'))
if isStop:
return
async with session_lock:
@@ -82,14 +84,12 @@ async def genPyroString(client, message):
await pyro_client.connect()
try:
user_code = await pyro_client.send_code(session_dict['PHONE_NO'])
- await sleep(1.5)
- except FloodWait as e:
+ await asyncio.sleep(1.5)
+ except Throttled as e:
return await editMessage(sess_msg, f"Floodwait of {e.value} Seconds. Retry Again\n\n ⌬ Process Stopped.")
- except ApiIdInvalid:
- return await editMessage(sess_msg, "API_ID and API_HASH are Invalid. Retry Again\n\n ⌬ Process Stopped.")
- except PhoneNumberInvalid:
- return await editMessage(sess_msg, "Phone Number is Invalid. Retry Again\n\n ⌬ Process Stopped.")
- await sleep(1.5)
+ except (ApiIdInvalid, PhoneNumberInvalid):
+ return await editMessage(sess_msg, "API_ID, API_HASH, or Phone Number are Invalid. Retry Again\n\n ⌬ Process Stopped.")
+ await asyncio.sleep(1.5)
await editMessage(sess_msg, """⌬ Pyrogram String Session Generator
OTP has been sent to your Phone Number, Enter OTP in 1 2 3 4 5 format. ( Space between each Digits )
@@ -97,7 +97,7 @@ async def genPyroString(client, message):
Timeout: 120s
Send /stop to Stop Process""")
- await wrap_future(invoke(client, message, 'OTP'))
+ await wrap_future(invoke(message, 'OTP'))
if isStop:
return
async with session_lock:
@@ -109,7 +109,7 @@ async def genPyroString(client, message):
except PhoneCodeExpired:
return await editMessage(sess_msg, " Input OTP has Expired.\n\n ⌬ Process Stopped.")
except SessionPasswordNeeded:
- await sleep(1.5)
+ await asyncio.sleep(1.5)
await editMessage(sess_msg, f"""⌬ Pyrogram String Session Generator
Account is being Protected via Two-Step Verification. Send your Password below.
@@ -118,7 +118,7 @@ async def genPyroString(client, message):
Password Hint : {await pyro_client.get_password_hint()}
Send /stop to Stop Process""")
- await wrap_future(invoke(client, message, 'TWO_STEP_PASS'))
+ await wrap_future(invoke(message, 'TWO_STEP_PASS'))
if isStop:
return
async with session_lock:
@@ -140,9 +140,8 @@ async def genPyroString(client, message):
await aioremove(f'WZML-X-{message.from_user.id}.session')
await aioremove(f'WZML-X-{message.from_user.id}.session-journal')
except: pass
-
-async def set_details(_, message, newkey):
+async def set_details(_, message: Message, newkey):
global isStop
user_id = message.from_user.id
value = message.text
@@ -154,21 +153,19 @@ async def set_details(_, message, newkey):
isStop = True
return await editMessage(session_dict['message'], '⌬ Process Stopped')
-
@new_thread
-async def invoke(client, message, key):
+async def invoke(message: Message, key, client=bot):  # client defaults to the global bot instance since callers no longer pass one
global isStop
user_id = message.from_user.id
session_dict[user_id] = True
start_time = time()
handler = client.add_handler(MessageHandler(partial(set_details, newkey=key), filters=user(user_id) & text & private), group=-1)
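+ # Poll every 0.5s until set_details stores the reply or the 120s timeout below stops the flow.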
while session_dict[user_id]:
- await sleep(0.5)
+ await asyncio.sleep(0.5)
if time() - start_time > 120:
session_dict[user_id] = False
await editMessage(message, "⌬ Process Stopped")
isStop = True
client.remove_handler(*handler)
-
-bot.add_handler(MessageHandler(genPyroString, filters=command('exportsession') & private & CustomFilters.sudo))
\ No newline at end of file
+bot.add_handler(MessageHandler(genPyroString, filters=command('exportsession') & private & CustomFilters.sudo))
diff --git a/bot/modules/images.py b/bot/modules/images.py
index 29bab52293..af18cbba2e 100644
--- a/bot/modules/images.py
+++ b/bot/modules/images.py
@@ -1,10 +1,15 @@
#!/usr/bin/env python3
-from asyncio import sleep as asleep
-from aiofiles.os import path as aiopath, remove as aioremove, mkdir
-from telegraph import upload_file
+import asyncio
+import os
+import re
+from urllib.parse import urlparse
+import aiofiles
+import aiohttp
+import telegraph
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex
+from pyrogram.errors import FloodWait
from bot import bot, LOGGER, config_dict, DATABASE_URL
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage
@@ -14,68 +19,49 @@
from bot.helper.ext_utils.db_handler import DbManger
from bot.helper.telegram_helper.button_build import ButtonMaker
-@new_task
async def picture_add(_, message):
- resm = message.reply_to_message
- editable = await sendMessage(message, "Fetching Input ...")
- if len(message.command) > 1 or resm and resm.text:
- msg_text = resm.text if resm else message.command[1]
- if msg_text.startswith("http"):
- pic_add = msg_text.strip()
- await editMessage(editable, f"Adding your Link : {pic_add}
")
- else:
- return await editMessage(editable, "Not a Valid Link, Must Start with 'http'")
- elif resm and resm.photo:
- if not (resm.photo and resm.photo.file_size <= 5242880*2):
- return await editMessage(editable, "Media is Not Supported! Only Photos!!")
- try:
- photo_dir = await resm.download()
- await editMessage(editable, "Now, Uploading to graph.org
, Please Wait...")
- await asleep(1)
- pic_add = f'https://graph.org{upload_file(photo_dir)[0]}'
- LOGGER.info(f"Telegraph Link : {pic_add}")
- except Exception as e:
- LOGGER.error(f"Images Error: {str(e)}")
- await editMessage(editable, str(e))
- finally:
- await aioremove(photo_dir)
+ editable = await sendMessage(message, "Fetching Input...")
+ args = message.command[1:] if message.command else []
+ if not args and not (message.reply_to_message and message.reply_to_message.photo):
+ return await editMessage(editable, "Invalid input. Use /addimage [image_url] or reply to an image.")
+
+ if args and re.match(r'^https?://', args[0]):
+ pic_add = args[0].strip()
+ elif message.reply_to_message and message.reply_to_message.photo:
+ pic_add = await download_image(message.reply_to_message)
+ if not pic_add:
+ return await editMessage(editable, "Failed to process the replied photo.")
else:
- help_msg = "By Replying to Link (Telegra.ph or DDL):"
- help_msg += f"\n/{BotCommands.AddImageCommand}" + " {link}" + "
\n"
- help_msg += "\nBy Replying to Photo on Telegram:"
- help_msg += f"\n/{BotCommands.AddImageCommand}" + " {photo}" + "
"
- return await editMessage(editable, help_msg)
+ return await editMessage(editable, "Invalid image URL or not a reply to an image.")
+
config_dict['IMAGES'].append(pic_add)
if DATABASE_URL:
await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
- await asleep(1.5)
- await editMessage(editable, f"Successfully Added to Images List!\n\n• Total Images : {len(config_dict['IMAGES'])}")
+ await editMessage(editable, f"Successfully added to Images List!\n• Total Images: {len(config_dict['IMAGES'])}")
async def pictures(_, message):
- user_id = message.from_user.id
if not config_dict['IMAGES']:
- await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}")
- else:
- to_edit = await sendMessage(message, "Generating Grid of your Images...")
- buttons = ButtonMaker()
- buttons.ibutton("<<", f"images {user_id} turn -1")
- buttons.ibutton(">>", f"images {user_id} turn 1")
- buttons.ibutton("Remove Image", f"images {user_id} remov 0")
- buttons.ibutton("Close", f"images {user_id} close")
- buttons.ibutton("Remove All", f"images {user_id} removall", 'footer')
- await deleteMessage(to_edit)
- await sendMessage(message, f'🌄 Image No. : 1 / {len(config_dict["IMAGES"])}', buttons.build_menu(2), config_dict['IMAGES'][0])
+ await sendMessage(message, "No photos to show! Add photos by /addimage command.")
+ return
+
+ to_edit = await sendMessage(message, "Generating grid of your images...")
+ buttons = ButtonMaker()
+ buttons.ibutton("<<", f"images {message.from_user.id} turn -1")
+ buttons.ibutton(">>", f"images {message.from_user.id} turn 1")
+ buttons.ibutton("Remove Image", f"images {message.from_user.id} remov 0")
+ buttons.ibutton("Close", f"images {message.from_user.id} close")
+ buttons.ibutton("Remove All", f"images {message.from_user.id} removall", 'footer')
+ await deleteMessage(to_edit)
+ await sendMessage(message, f'🌄 Image No. : 1 / {len(config_dict["IMAGES"])}', buttons.build_menu(2), config_dict['IMAGES'][0])
@new_task
async def pics_callback(_, query):
- message = query.message
user_id = query.from_user.id
data = query.data.split()
if user_id != int(data[1]):
await query.answer(text="Not Authorized User!", show_alert=True)
return
+
if data[2] == "turn":
await query.answer()
ind = handleIndex(int(data[3]), config_dict['IMAGES'])
@@ -87,15 +73,15 @@ async def pics_callback(_, query):
buttons.ibutton("Remove Image", f"images {data[1]} remov {ind}")
buttons.ibutton("Close", f"images {data[1]} close")
buttons.ibutton("Remove All", f"images {data[1]} removall", 'footer')
- await editMessage(message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
+ await editMessage(query.message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
elif data[2] == "remov":
config_dict['IMAGES'].pop(int(data[3]))
if DATABASE_URL:
await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
- query.answer("Image Successfully Deleted", show_alert=True)
+ await query.answer("Image Successfully Deleted", show_alert=True)
if len(config_dict['IMAGES']) == 0:
await deleteMessage(query.message)
- await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}")
+ await sendMessage(query.message, "No photos to show! Add photos by /addimage command.")
return
ind = int(data[3])+1
ind = len(config_dict['IMAGES']) - abs(ind) if ind < 0 else ind
@@ -106,19 +92,39 @@ async def pics_callback(_, query):
buttons.ibutton("Remove Image", f"images {data[1]} remov {ind}")
buttons.ibutton("Close", f"images {data[1]} close")
buttons.ibutton("Remove All", f"images {data[1]} removall", 'footer')
- await editMessage(message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
+ await editMessage(query.message, pic_info, buttons.build_menu(2), config_dict['IMAGES'][ind])
elif data[2] == 'removall':
config_dict['IMAGES'].clear()
if DATABASE_URL:
await DbManger().update_config({'IMAGES': config_dict['IMAGES']})
await query.answer("All Images Successfully Deleted", show_alert=True)
- await sendMessage(message, f"No Images to Show ! Add by /{BotCommands.AddImageCommand}")
- await deleteMessage(message)
+ await sendMessage(query.message, "No images to show! Add photos by /addimage command.")
+ await deleteMessage(query.message)
else:
await query.answer()
- await deleteMessage(message)
- if message.reply_to_message:
- await deleteMessage(message.reply_to_message)
+ await deleteMessage(query.message)
+
+
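+# download_image re-uploads a replied photo to graph.org so config_dict['IMAGES'] keeps holding
+# public URLs (as the previous telegraph-based flow did) rather than local file paths.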
+async def download_image(message):
+ """Download a replied photo and re-upload it to graph.org, returning the public link."""
+ try:
+ file_path = await message.download()
+ except FloodWait as e:
+ await asyncio.sleep(e.value)
+ file_path = await message.download()
+ except Exception as e:
+ LOGGER.error(f"Error downloading image: {str(e)}")
+ return None
+
+ try:
+ pic_add = f'https://graph.org{telegraph.upload_file(file_path)[0]}'
+ LOGGER.info(f"Telegraph Link : {pic_add}")
+ except Exception as e:
+ LOGGER.error(f"Images Error: {str(e)}")
+ pic_add = None
+ finally:
+ os.remove(file_path)
+ return pic_add
bot.add_handler(MessageHandler(picture_add, filters=command(BotCommands.AddImageCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/imdb.py b/bot/modules/imdb.py
index 2cf008de69..e2c5ebf6c2 100644
--- a/bot/modules/imdb.py
+++ b/bot/modules/imdb.py
@@ -1,11 +1,12 @@
#!/usr/bin/env python3
-from re import findall, IGNORECASE
-from imdb import Cinemagoer
-from pycountry import countries as conn
+import re
+from typing import List, Dict, Union
+from urllib.parse import urlparse
+import requests
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex
-from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup
+from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, InputMediaPhoto
from pyrogram.errors import MediaEmpty, PhotoInvalidDimensions, WebpageMediaEmpty
from bot import bot, LOGGER, user_data, config_dict
@@ -15,233 +16,140 @@
from bot.helper.ext_utils.bot_utils import get_readable_time
from bot.helper.telegram_helper.button_build import ButtonMaker
-imdb = Cinemagoer()
-
-IMDB_GENRE_EMOJI = {"Action": "🚀", "Adult": "🔞", "Adventure": "🌋", "Animation": "🎠", "Biography": "📜", "Comedy": "🪗", "Crime": "🔪", "Documentary": "🎞", "Drama": "🎭", "Family": "👨👩👧👦", "Fantasy": "🫧", "Film Noir": "🎯", "Game Show": "🎮", "History": "🏛", "Horror": "🧟", "Musical": "🎻", "Music": "🎸", "Mystery": "🧳", "News": "📰", "Reality-TV": "🖥", "Romance": "🥰", "Sci-Fi": "🌠", "Short": "📝", "Sport": "⛳", "Talk-Show": "👨🍳", "Thriller": "🗡", "War": "⚔", "Western": "🪩"}
+IMDB_GENRE_EMOJI = {
+ "Action": "🚀",
+ "Adult": "🔞",
+ "Adventure": "🌋",
+ "Animation": "🎠",
+ "Biography": "📜",
+ "Comedy": "🪗",
+ "Crime": "🔪",
+ "Documentary": "🎞",
+ "Drama": "🎭",
+ "Family": "👨👩👧👦",
+ "Fantasy": "🫧",
+ "Film Noir": "🎯",
+ "Game Show": "🎮",
+ "History": "🏛",
+ "Horror": "🧟",
+ "Musical": "🎻",
+ "Music": "🎸",
+ "Mystery": "🧳",
+ "News": "📰",
+ "Reality-TV": "🖥",
+ "Romance": "🥰",
+ "Sci-Fi": "🌠",
+ "Short": "📝",
+ "Sport": "⛳",
+ "Talk-Show": "👨🍳",
+ "Thriller": "🗡",
+ "War": "⚔",
+ "Western": "🪩",
+}
LIST_ITEMS = 4
-async def imdb_search(_, message):
- if ' ' in message.text:
- k = await sendMessage(message, 'Searching IMDB ...')
- title = message.text.split(' ', 1)[1]
- user_id = message.from_user.id
- buttons = ButtonMaker()
- if title.lower().startswith("https://www.imdb.com/title/tt"):
- movieid = title.replace("https://www.imdb.com/title/tt", "")
- movie = imdb.get_movie(movieid)
- if not movie:
- return await editMessage(k, "No Results Found")
- buttons.ibutton(f"🎬 {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}")
- else:
- movies = get_poster(title, bulk=True)
- if not movies:
- return editMessage("No Results Found, Try Again or Use Title ID", k)
- for movie in movies: # Refurbished Soon !!
- buttons.ibutton(f"🎬 {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movie.movieID}")
- buttons.ibutton("🚫 Close 🚫", f"imdb {user_id} close")
- await editMessage(k, 'Here What I found on IMDb.com', buttons.build_menu(1))
- else:
- await sendMessage(message, 'Send Movie / TV Series Name along with /imdb Command or send IMDB URL')
-
-
-def get_poster(query, bulk=False, id=False, file=None):
- if not id:
- query = (query.strip()).lower()
- title = query
- year = findall(r'[1-2]\d{3}$', query, IGNORECASE)
- if year:
- year = list_to_str(year[:1])
- title = (query.replace(year, "")).strip()
- elif file is not None:
- year = findall(r'[1-2]\d{3}', file, IGNORECASE)
- if year:
- year = list_to_str(year[:1])
- else:
- year = None
- movieid = imdb.search_movie(title.lower(), results=10)
- if not movieid:
- return None
- if year:
- filtered=list(filter(lambda k: str(k.get('year')) == str(year), movieid))
- if not filtered:
- filtered = movieid
- else:
- filtered = movieid
- movieid=list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered))
- if not movieid:
- movieid = filtered
- if bulk:
- return movieid
- movieid = movieid[0].movieID
- else:
- movieid = query
- movie = imdb.get_movie(movieid)
- if movie.get("original air date"):
- date = movie["original air date"]
- elif movie.get("year"):
- date = movie.get("year")
- else:
- date = "N/A"
- plot = movie.get('plot')
- if plot and len(plot) > 0:
- plot = plot[0]
- else:
- plot = movie.get('plot outline')
- if plot and len(plot) > 300:
- plot = f"{plot[:300]}..."
- return {
- 'title': movie.get('title'),
- 'trailer': movie.get('videos'),
- 'votes': movie.get('votes'),
- "aka": list_to_str(movie.get("akas")),
- "seasons": movie.get("number of seasons"),
- "box_office": movie.get('box office'),
- 'localized_title': movie.get('localized title'),
- 'kind': movie.get("kind"),
- "imdb_id": f"tt{movie.get('imdbID')}",
- "cast": list_to_str(movie.get("cast")),
- "runtime": list_to_str([get_readable_time(int(run) * 60) for run in movie.get("runtimes", "0")]),
- "countries": list_to_hash(movie.get("countries"), True),
- "certificates": list_to_str(movie.get("certificates")),
- "languages": list_to_hash(movie.get("languages")),
- "director": list_to_str(movie.get("director")),
- "writer":list_to_str(movie.get("writer")),
- "producer":list_to_str(movie.get("producer")),
- "composer":list_to_str(movie.get("composer")) ,
- "cinematographer":list_to_str(movie.get("cinematographer")),
- "music_team": list_to_str(movie.get("music department")),
- "distributors": list_to_str(movie.get("distributors")),
- 'release_date': date,
- 'year': movie.get('year'),
- 'genres': list_to_hash(movie.get("genres"), emoji=True),
- 'poster': movie.get('full-size cover url'),
- 'plot': plot,
- 'rating': str(movie.get("rating"))+" / 10",
- 'url':f'https://www.imdb.com/title/tt{movieid}',
- 'url_cast':f'https://www.imdb.com/title/tt{movieid}/fullcredits#cast',
- 'url_releaseinfo':f'https://www.imdb.com/title/tt{movieid}/releaseinfo',
- }
+def get_imdb_id(url: str) -> str:
+ if "imdb.com/title/tt" in url:
+ return url.split("imdb.com/title/tt")[-1]
+ return ""
-def list_to_str(k):
- if not k:
- return ""
- elif len(k) == 1:
- return str(k[0])
- elif LIST_ITEMS:
- k = k[:int(LIST_ITEMS)]
- return ' '.join(f'{elem},' for elem in k)[:-1]+' ...'
- else:
- return ' '.join(f'{elem},' for elem in k)[:-1]
+async def get_imdb_data(query: str) -> Union[Dict, None]:
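+ # Note: this helper scrapes the raw IMDb HTML for the "poster"/"title"/"year"/"id" fragments
+ # instead of using Cinemagoer, and the requests.get() below is a blocking call inside an
+ # async function; it will fail silently (returning None) if IMDb changes its markup.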
+ if "http" not in query and "https" not in query:
+ query = f"https://www.imdb.com/find?q={query}&s=tt&ttype=ft&ref_=fn_ft"
+ try:
+ response = requests.get(query)
+ if response.status_code == 200:
+ html_content = response.text
+ start_index = html_content.index('"poster":"') + len('"poster":"')
+ end_index = html_content.index('","image"', start_index)
+ poster_url = html_content[start_index:end_index].replace("\\/", "/")
+ start_index = html_content.index('"title":"') + len('"title":"')
+ end_index = html_content.index('","year"', start_index)
+ title = html_content[start_index:end_index]
+ start_index = html_content.index('"year":"') + len('"year":"')
+ end_index = html_content.index('","id"', start_index)
+ year = html_content[start_index:end_index]
+ start_index = html_content.index('"id":"') + len('"id":"')
+ end_index = html_content.index('","type"', start_index)
+ imdb_id = html_content[start_index:end_index]
+ return {
+ "poster": poster_url,
+ "title": title,
+ "year": year,
+ "imdb_id": imdb_id,
+ }
+ except Exception as e:
+ LOGGER.error(e)
+ return None
-def list_to_hash(k, flagg=False, emoji=False):
- listing = ""
- if not k:
- return ""
- elif len(k) == 1:
- if not flagg:
- if emoji:
- return str(IMDB_GENRE_EMOJI.get(k[0], '')+" #"+k[0].replace(" ", "_").replace("-", "_"))
- return str("#"+k[0].replace(" ", "_").replace("-", "_"))
- try:
- conflag = (conn.get(name=k[0])).flag
- return str(f"{conflag} #" + k[0].replace(" ", "_").replace("-", "_"))
- except AttributeError:
- return str("#"+k[0].replace(" ", "_").replace("-", "_"))
- elif LIST_ITEMS:
- k = k[:int(LIST_ITEMS)]
- for elem in k:
- ele = elem.replace(" ", "_").replace("-", "_")
- if flagg:
- try:
- conflag = (conn.get(name=elem)).flag
- listing += f'{conflag} '
- except AttributeError:
- pass
- if emoji:
- listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} "
- listing += f'#{ele}, '
- return f'{listing[:-2]}'
+async def imdb_search(client, message):
+ if " " not in message.text:
+ await sendMessage(message, 'Send Movie / TV Series Name along with /imdb Command or send IMDB URL')
+ return
+ query = message.text.split(" ", 1)[1]
+ user_id = message.from_user.id
+ buttons = ButtonMaker()
+ if "http" in query or "https" in query:
+ imdb_id = get_imdb_id(query)
+ if not imdb_id:
+ await sendMessage(message, "Invalid IMDB URL")
+ return
+ movie_data = await get_imdb_data(f"https://www.imdb.com/title/{imdb_id}/")
+ if not movie_data:
+ await sendMessage(message, "No results found")
+ return
else:
- for elem in k:
- ele = elem.replace(" ", "_").replace("-", "_")
- if flagg:
- conflag = (conn.get(name=elem)).flag
- listing += f'{conflag} '
- listing += f'#{ele}, '
- return listing[:-2]
-
+ movie_data = await get_imdb_data(f"https://www.imdb.com/find?q={query}&s=tt&ttype=ft&ref_=fn_ft")
+ if not movie_data:
+ await sendMessage(message, "No results found")
+ return
+ buttons.ibutton(f"🎬 {movie_data['title']} ({movie_data['year']})", f"imdb {user_id} movie {movie_data['imdb_id']}")
+ buttons.ibutton("🚫 Close 🚫", f"imdb {user_id} close")
+ await sendMessage(message, "Here's what I found on IMDb.com:", buttons.build_menu(1))
-async def imdb_callback(_, query):
+async def imdb_callback(client, query):
message = query.message
user_id = query.from_user.id
data = query.data.split()
if user_id != int(data[1]):
await query.answer("Not Yours!", show_alert=True)
+ return
elif data[2] == "movie":
await query.answer()
- imdb = get_poster(query=data[3], id=True)
+ movie_id = data[3]
+ movie_data = await get_imdb_data(f"https://www.imdb.com/title/{movie_id}/")
+ if not movie_data:
+ await query.answer("No results found", show_alert=True)
+ return
buttons = []
- if imdb['trailer']:
- if isinstance(imdb['trailer'], list):
- buttons.append([InlineKeyboardButton("▶️ IMDb Trailer ", url=str(imdb['trailer'][-1]))])
- imdb['trailer'] = list_to_str(imdb['trailer'])
- else: buttons.append([InlineKeyboardButton("▶️ IMDb Trailer ", url=str(imdb['trailer']))])
- buttons.append([InlineKeyboardButton("🚫 Close 🚫", callback_data=f"imdb {user_id} close")])
- template = ''
- #if int(data[1]) in user_data and user_data[int(data[1])].get('imdb_temp'):
- # template = user_data[int(data[1])].get('imdb_temp')
- #if not template:
- template = config_dict['IMDB_TEMPLATE']
- if imdb and template != "":
- cap = template.format(
- title = imdb['title'],
- trailer = imdb['trailer'],
- votes = imdb['votes'],
- aka = imdb["aka"],
- seasons = imdb["seasons"],
- box_office = imdb['box_office'],
- localized_title = imdb['localized_title'],
- kind = imdb['kind'],
- imdb_id = imdb["imdb_id"],
- cast = imdb["cast"],
- runtime = imdb["runtime"],
- countries = imdb["countries"],
- certificates = imdb["certificates"],
- languages = imdb["languages"],
- director = imdb["director"],
- writer = imdb["writer"],
- producer = imdb["producer"],
- composer = imdb["composer"],
- cinematographer = imdb["cinematographer"],
- music_team = imdb["music_team"],
- distributors = imdb["distributors"],
- release_date = imdb['release_date'],
- year = imdb['year'],
- genres = imdb['genres'],
- poster = imdb['poster'],
- plot = imdb['plot'],
- rating = imdb['rating'],
- url = imdb['url'],
- url_cast = imdb['url_cast'],
- url_releaseinfo = imdb['url_releaseinfo'],
- **locals()
- )
- else:
- cap = "No Results"
- if imdb.get('poster'):
+ if movie_data.get("poster"):
try:
- await bot.send_photo(chat_id=query.message.reply_to_message.chat.id, caption=cap, photo=imdb['poster'], reply_to_message_id=query.message.reply_to_message.id, reply_markup=InlineKeyboardMarkup(buttons))
+ await bot.send_photo(
+ chat_id=query.message.reply_to_message.chat.id,
+ caption=movie_data["title"],
+ photo=movie_data["poster"],
+ reply_to_message_id=query.message.reply_to_message.id,
+ reply_markup=InlineKeyboardMarkup(buttons),
+ )
except (MediaEmpty, PhotoInvalidDimensions, WebpageMediaEmpty):
- poster = imdb.get('poster').replace('.jpg', "._V1_UX360.jpg")
- await sendMessage(message.reply_to_message, cap, InlineKeyboardMarkup(buttons), poster)
+ await sendMessage(
+ message.reply_to_message,
+ movie_data["title"],
+ InlineKeyboardMarkup(buttons),
+ movie_data["poster"],
+ )
else:
- await sendMessage(message.reply_to_message, cap, InlineKeyboardMarkup(buttons), 'https://telegra.ph/file/5af8d90a479b0d11df298.jpg')
+ await sendMessage(
+ message.reply_to_message,
+ movie_data["title"],
+ InlineKeyboardMarkup(buttons),
+ 'https://telegra.ph/file/5af8d90a479b0d11df298.jpg',
+ )
await message.delete()
else:
await query.answer()
await query.message.delete()
await query.message.reply_to_message.delete()
-
bot.add_handler(MessageHandler(imdb_search, filters=command(BotCommands.IMDBCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
bot.add_handler(CallbackQueryHandler(imdb_callback, filters=regex(r'^imdb')))
diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py
index 5f78900481..f95d319413 100644
--- a/bot/modules/mediainfo.py
+++ b/bot/modules/mediainfo.py
@@ -1,13 +1,15 @@
#!/usr/bin/env python3
-import aiohttp
-from re import search as re_search
-from shlex import split as ssplit
-from aiofiles import open as aiopen
-from aiofiles.os import remove as aioremove, path as aiopath, mkdir
-from os import path as ospath, getcwd
+import asyncio
+import os
+import re
+import shlex
+from pathlib import Path
-from pyrogram.handlers import MessageHandler
+import aiohttp
+import aiofiles
+from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
+from pyrogram.errors import UserIsBlocked, MessageNotModified, ChatWriteForbidden
from bot import LOGGER, bot, config_dict
from bot.helper.telegram_helper.filters import CustomFilters
@@ -16,81 +18,28 @@
from bot.helper.ext_utils.bot_utils import cmd_exec
from bot.helper.ext_utils.telegraph_helper import telegraph
-
-async def gen_mediainfo(message, link=None, media=None, mmsg=None):
- temp_send = await sendMessage(message, 'Generating MediaInfo...')
+MEDIAINFO_PATH = "Mediainfo/"
+
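+# Stream the remote file to disk in 10 MB chunks so large downloads stay memory-bounded.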
+async def download_file(session, url, file_path):
+ async with session.get(url, headers={"user-agent": "Mozilla/5.0"}) as response:
+ if response.status != 200:
+ raise Exception(f"Failed to download file: {response.status}")
+ async with aiofiles.open(file_path, "wb") as f:
+ while True:
+ chunk = await response.content.read(10000000)
+ if not chunk:
+ break
+ await f.write(chunk)
+
+async def generate_mediainfo(message, link=None, media=None, mmsg=None):
+ temp_send = await sendMessage(message, "Generating MediaInfo...")
try:
- path = "Mediainfo/"
- if not await aiopath.isdir(path):
- await mkdir(path)
if link:
- filename = re_search(".+/(.+)", link).group(1)
- des_path = ospath.join(path, filename)
- headers = {"user-agent":"Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36"}
+ file_name = re.search(".+/(.+)", link).group(1)
+ file_path = Path(MEDIAINFO_PATH) / file_name
async with aiohttp.ClientSession() as session:
- async with session.get(link, headers=headers) as response:
- async with aiopen(des_path, "wb") as f:
- async for chunk in response.content.iter_chunked(10000000):
- await f.write(chunk)
- break
+ await download_file(session, link, file_path)
elif media:
- des_path = ospath.join(path, media.file_name)
+ file_path = Path(MEDIAINFO_PATH) / media.file_name
if media.file_size <= 50000000:
- await mmsg.download(ospath.join(getcwd(), des_path))
- else:
- async for chunk in bot.stream_media(media, limit=5):
- async with aiopen(des_path, "ab") as f:
- await f.write(chunk)
- stdout, _, _ = await cmd_exec(ssplit(f'mediainfo "{des_path}"'))
- tc = f"' - trigger = False - else: - tc += line + '\n' - tc += '
/{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]}" + " {media}" + "
"
- help_msg += "\n\nBy reply/sending download link:"
- help_msg += f"\n/{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]}" + " {link}" + "
"
- if len(message.command) > 1 or rply and rply.text:
- link = rply.text if rply else message.command[1]
- return await gen_mediainfo(message, link)
- elif rply:
- file = next((i for i in [rply.document, rply.video, rply.audio, rply.voice,
- rply.animation, rply.video_note] if i is not None), None)
- if not file:
- return await sendMessage(message, help_msg)
- return await gen_mediainfo(message, None, file, rply)
- else:
- return await sendMessage(message, help_msg)
-
-bot.add_handler(MessageHandler(mediainfo, filters=command(BotCommands.MediaInfoCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+
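+# A minimal sketch of the command entry point the rewritten module still needs, adapted from
+# the removed gen_mediainfo/mediainfo flow above; the tail of generate_mediainfo (downloading
+# the media, running `mediainfo` via cmd_exec and publishing through telegraph) is assumed to
+# follow the same steps as the removed code, e.g.:
+#
+#     stdout, _, _ = await cmd_exec(shlex.split(f'mediainfo "{file_path}"'))
+#     link_id = (await telegraph.create_page(title="MediaInfo", content=stdout))["path"]
+#     await editMessage(temp_send, f"MediaInfo: https://graph.org/{link_id}")
+
+async def mediainfo(_, message):
+ rply = message.reply_to_message
+ help_msg = "By replying to media or a download link:"
+ help_msg += f"\n/{BotCommands.MediaInfoCommand[0]} or /{BotCommands.MediaInfoCommand[1]}" + " {media / link}"
+ if len(message.command) > 1 or (rply and rply.text):
+ return await generate_mediainfo(message, rply.text if rply else message.command[1])
+ if rply and (media := next((i for i in [rply.document, rply.video, rply.audio, rply.voice, rply.animation, rply.video_note] if i is not None), None)):
+ return await generate_mediainfo(message, None, media, rply)
+ await sendMessage(message, help_msg)
+
+bot.add_handler(MessageHandler(mediainfo, filters=command(BotCommands.MediaInfoCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))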
diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py
index 42ce65b952..7da3569a68 100644
--- a/bot/modules/mirror_leech.py
+++ b/bot/modules/mirror_leech.py
@@ -3,6 +3,7 @@
from html import escape
from base64 import b64encode
from re import match as re_match
+from urllib.parse import unquote
from asyncio import sleep
from aiofiles import open as aiopen
from aiofiles.os import path as aiopath
@@ -223,9 +224,9 @@ async def __run_multi():
await delete_links(message)
return
- org_link = None
+ org_link, headers, multiAria = None, '', []
if link:
- LOGGER.info(link)
+ LOGGER.info(f"Link: {link}")
org_link = link
if not is_mega_link(link) and not isQbit and not is_magnet(link) and not is_rclone_path(link) \
@@ -235,6 +236,14 @@ async def __run_multi():
process_msg = await sendMessage(message, f"Processing: {link}
")
try:
link = await sync_to_async(direct_link_generator, link)
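+ # direct_link_generator may return a [link, headers] pair, or a {url: folder_name} map for
+ # multi-file hosts; unpack both shapes before queueing the aria2c download.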
+ if isinstance(link, list):
+ link, headers = link
+ if isinstance(link, dict):
+ multiAria = [link, headers, unquote(org_link.rstrip('/').rsplit('/', 1)[1])]
+ link = list(multiAria[0].keys())[0]
+ if (folder_name := multiAria[0][link]):
+ path += "/" + folder_name
+ multiAria[0].pop(link)
LOGGER.info(f"Generated link: {link}")
await editMessage(process_msg, f"Generated link: {link}
")
except DirectDownloadLinkException as e:
@@ -318,7 +327,8 @@ async def __run_multi():
return
listener = MirrorLeechListener(message, compress, extract, isQbit, isLeech, tag, select, seed,
- sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link, source_url=org_link if org_link else link)
+ sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link,
+ source_url=org_link if org_link else link, multiAria=multiAria)
if file_ is not None:
await delete_links(message)
@@ -347,10 +357,8 @@ async def __run_multi():
pssw = args['-p'] or args['-pass']
if ussr or pssw:
auth = f"{ussr}:{pssw}"
- auth = "Basic " + b64encode(auth.encode()).decode('ascii')
- else:
- auth = ''
- await add_aria2c_download(link, path, listener, name, auth, ratio, seed_time)
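+ # -p/-pass credentials are now forwarded to aria2c as an "authorization: Basic ..." header
+ # instead of a separate auth argument.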
+ headers = f"authorization: Basic {b64encode(auth.encode()).decode('ascii')}"
+ await add_aria2c_download(link, path, listener, name, headers, ratio, seed_time)
await delete_links(message)
diff --git a/bot/modules/save_msg.py b/bot/modules/save_msg.py
index fdb65076e4..5385af1cd0 100644
--- a/bot/modules/save_msg.py
+++ b/bot/modules/save_msg.py
@@ -1,26 +1,39 @@
#!/usr/bin/env python3
-from pyrogram.types import InlineKeyboardMarkup
+from pyrogram.types import InlineKeyboardMarkup, CallbackQuery
from pyrogram.handlers import CallbackQueryHandler
from pyrogram.filters import regex
from asyncio import sleep
+from typing import Optional
from bot import bot, bot_name, user_data
-async def save_message(_, query):
- usr = query.from_user.id
- user_dict = user_data.get(usr, {})
+async def save_message(_, query: CallbackQuery) -> None:
+ """Save the current message/media to the user's chat."""
+
+ user_id = query.from_user.id
+ user_dict = user_data.get(user_id, {})
+
if query.data == "save":
- if user_dict.get('save_mode'):
- usr = next(iter(user_dict.get('ldump', {}).values()))
try:
- await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
- await query.answer("Message/Media Successfully Saved !", show_alert=True)
- except:
- if user_dict.get('save_mode'):
- await query.answer('Make Bot as Admin and give Post Permissions and Try Again', show_alert=True)
+ save_mode = user_dict.get('save_mode')
+ if save_mode:
+ user_to_save_to = next(iter(user_dict.get('ldump', {}).values()))
else:
- await query.answer(url=f"https://t.me/{bot_name}?start=start")
+ raise ValueError("Save mode not enabled.")
+ except (StopIteration, KeyError) as e:
+ await query.answer("An error occurred while saving the message.", show_alert=True)
+ return
+
+ try:
+ reply_markup = query.message.reply_markup
+ keyboard = InlineKeyboardMarkup(btns) if reply_markup and (btns := reply_markup.inline_keyboard[:-1]) else None
+ await query.message.copy(user_to_save_to, reply_markup=keyboard)
+ await query.answer("Message/Media successfully saved!", show_alert=True)
+ except Exception as e:
+ if save_mode:
+ await query.answer("Make the bot an admin and give it post permissions.", show_alert=True)
+ else:
+ url = f"https://t.me/{bot_name}?start=start"
+ await query.answer(url, show_alert=True)
await sleep(1)
- await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
-
-bot.add_handler(CallbackQueryHandler(save_message, filters=regex(r"^save")))
+ await query.message.copy(user_to_save_to, reply_markup=keyboard)
+
+bot.add_handler(CallbackQueryHandler(save_message, filters=regex(r"^save")))
diff --git a/bot/modules/speedtest.py b/bot/modules/speedtest.py
index 11e9753eaf..e6ae5a25dd 100644
--- a/bot/modules/speedtest.py
+++ b/bot/modules/speedtest.py
@@ -1,9 +1,16 @@
#!/usr/bin/env python3
-from speedtest import Speedtest
+import asyncio
+import functools
+import io
+import os
+
+import aiohttp
+from PIL import Image
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
+from speedtest import Speedtest
-from bot import bot, LOGGER
+from bot import bot, LOGGER
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, editMessage
@@ -45,11 +52,39 @@ async def speedtest(_, message):
┖ ISP Rating: {result['client']['isprating']}
'''
try:
- pho = await sendMessage(message, string_speed, photo=path)
+ # Download the image using aiohttp
+ async with aiohttp.ClientSession() as session:
+ async with session.get(path) as resp:
+ if resp.status != 200:
+ LOGGER.error(f"Failed to download image: {resp.status}")
+ return
+ jpg_data = await resp.read()
+
+ # Save the image temporarily
+ temp_file = "temp_image.jpg"
+ with open(temp_file, "wb") as f:
+ f.write(jpg_data)
+
+ # Convert the image to a Telegram-friendly format
+ image = Image.open(temp_file)
+ img_bytes = await convert_image_to_telegram_format(image)
+
+ # Send the message with the image
+ pho = await sendMessage(message, string_speed, photo=img_bytes)
+ os.remove(temp_file)
await deleteMessage(speed)
except Exception as e:
LOGGER.error(str(e))
pho = await editMessage(speed, string_speed)
+
+async def convert_image_to_telegram_format(image):
+ """Convert the image to a format suitable for sending via Telegram."""
+ # Encode the PIL image as an in-memory JPEG; image.tobytes() would yield raw pixels, not a photo file.
+ img_data = io.BytesIO()
+ await asyncio.get_running_loop().run_in_executor(None, functools.partial(image.save, img_data, format="JPEG"))
+ img_data.name = "speedtest.jpg"
+ img_data.seek(0)
+ return img_data
+
+
bot.add_handler(MessageHandler(speedtest, filters=command(
BotCommands.SpeedCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/status.py b/bot/modules/status.py
index 10d53836e4..5763288777 100644
--- a/bot/modules/status.py
+++ b/bot/modules/status.py
@@ -1,60 +1,71 @@
#!/usr/bin/env python3
+from time import time
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from pyrogram.filters import command, regex
+from pyrogram.filters import command, regex
+from pyrogram.types import Message, CallbackQuery
from psutil import cpu_percent, virtual_memory, disk_usage
-from time import time
from asyncio import sleep
-
-from bot import bot_cache, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, config_dict, bot
+from bot import bot_cache, status_reply_dict_lock, download_dict, download_dict_lock, botStartTime, Interval, config_dict, bot, LOGGER
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, auto_delete_message, sendStatusMessage, user_info, update_all_messages, delete_all_messages
from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time, turn_page, setInterval, new_task
from bot.helper.themes import BotTheme
-
@new_task
-async def mirror_status(_, message):
- async with download_dict_lock:
- count = len(download_dict)
- if count == 0:
- currentTime = get_readable_time(time() - botStartTime)
- free = get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free)
- msg = BotTheme('NO_ACTIVE_DL', cpu=cpu_percent(), free=free, free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1),
- ram=virtual_memory().percent, uptime=currentTime)
- reply_message = await sendMessage(message, msg)
- await auto_delete_message(message, reply_message)
- else:
- await sendStatusMessage(message)
- await deleteMessage(message)
- async with status_reply_dict_lock:
- if Interval:
- Interval[0].cancel()
- Interval.clear()
- Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages))
-
-
-@new_task
-async def status_pages(_, query):
- user_id = query.from_user.id
- data = query.data.split()
- if data[1] == 'ref':
- bot_cache.setdefault('status_refresh', {})
- if user_id in (refresh_status := bot_cache['status_refresh']) and (curr := (time() - refresh_status[user_id])) < 7:
- return await query.answer(f'Already Refreshed! Try after {get_readable_time(7 - curr)}', show_alert=True)
+async def mirror_status(_, message: Message):
+ """
+ Handles the /status command and sends the current status of the bot.
+ """
+ try:
+ async with download_dict_lock:
+ count = len(download_dict)
+ if count == 0:
+ currentTime = get_readable_time(time() - botStartTime)
+ free = get_readable_file_size(disk_usage(config_dict['DOWNLOAD_DIR']).free)
+ msg = BotTheme('NO_ACTIVE_DL', cpu=cpu_percent(), free=free, free_p=round(100-disk_usage(config_dict['DOWNLOAD_DIR']).percent, 1),
+ ram=virtual_memory().percent, uptime=currentTime)
+ reply_message = await sendMessage(message, msg)
+ await auto_delete_message(message, reply_message)
else:
- refresh_status[user_id] = time()
- await editMessage(query.message, f"{(await user_info(user_id)).mention(style='html')}, Refreshing Status...")
- await sleep(1.5)
- await update_all_messages(True)
- elif data[1] in ['nex', 'pre']:
- await turn_page(data)
- await update_all_messages(True)
- elif data[1] == 'close':
- await delete_all_messages()
- await query.answer()
+ await sendStatusMessage(message)
+ await deleteMessage(message)
+ async with status_reply_dict_lock:
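+ # Cancel any previously scheduled status updater before re-arming it with STATUS_UPDATE_INTERVAL.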
+ if Interval:
+ Interval[0].cancel()
+ Interval.clear()
+ Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages))
+ except Exception as e:
+ print(f"Error in mirror_status: {e}")
+@new_task
+async def status_pages(_, query: CallbackQuery):
+ """
+ Handles the status callback queries and updates the status messages accordingly.
+ """
+ try:
+ user_id = query.from_user.id
+ data = query.data.split()
+ if data[1] == 'ref':
+ bot_cache.setdefault('status_refresh', {})
+ if user_id in (refresh_status := bot_cache['status_refresh']) and (curr := (time() - refresh_status[user_id])) < 7:
+ return await query.answer(f'Already Refreshed! Try after {get_readable_time(7 - curr)}', show_alert=True)
+ else:
+ refresh_status[user_id] = time()
+ await editMessage(query.message, f"{(await user_info(user_id)).mention(style='html')}, Refreshing Status...")
+ await sleep(1.5)
+ await update_all_messages(True)
+ elif data[1] in ['nex', 'pre']:
+ await turn_page(data)
+ await update_all_messages(True)
+ elif data[1] == 'close':
+ await delete_all_messages()
+ await query.answer()
+ except Exception as e:
+ print(f"Error in status_pages: {e}")
-bot.add_handler(MessageHandler(mirror_status, filters=command(
- BotCommands.StatusCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status")))
+bot.add_handler(MessageHandler(mirror_status, filters=command(BotCommands.StatusCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
+bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status")))
diff --git a/bot/modules/torrent_select.py b/bot/modules/torrent_select.py
index fa821394fd..ebc515b632 100644
--- a/bot/modules/torrent_select.py
+++ b/bot/modules/torrent_select.py
@@ -1,54 +1,74 @@
#!/usr/bin/env python3
+from typing import Coroutine
+
+from aiofiles.os import remove as aioremove, path as aiopath
+import pyrogram.filters
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
-from pyrogram.filters import regex
-from aiofiles.os import remove as aioremove, path as aiopath
+from pyrogram.types import Message
+
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.message_utils import sendMessage as send_message, sendStatusMessage as send_status_message, deleteMessage as delete_message
+from bot.helper.ext_utils.bot_utils import getDownloadByGid as get_download_by_gid, MirrorStatus, bt_selection_buttons, sync_to_async
-from bot import bot, bot_name, aria2, download_dict, download_dict_lock, OWNER_ID, user_data, LOGGER
-from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot.helper.telegram_helper.filters import CustomFilters
-from bot.helper.telegram_helper.message_utils import sendMessage, sendStatusMessage, deleteMessage
-from bot.helper.ext_utils.bot_utils import getDownloadByGid, MirrorStatus, bt_selection_buttons, sync_to_async
+from bot import bot, bot_name, aria2, download_dict, download_dict_lock, OWNER_ID, user_data, LOGGER
-async def select(client, message):
+async def select(client: pyrogram.Client, message: Message) -> Coroutine:
user_id = message.from_user.id
- msg = message.text.split('_', maxsplit=1)
+ msg = message.text.split("_", maxsplit=1)
if len(msg) > 1:
- cmd_data = msg[1].split('@', maxsplit=1)
+ cmd_data = msg[1].split("@", maxsplit=1)
if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name:
return
gid = cmd_data[0]
- dl = await getDownloadByGid(gid)
- if dl is None:
- await sendMessage(message, f"GID: {gid} Not Found.")
- return
+ dl = await get_download_by_gid(gid)
+ if dl is None:
+ return await send_message(message, f"GID: {gid} Not Found.")
elif reply_to_id := message.reply_to_message_id:
async with download_dict_lock:
dl = download_dict.get(reply_to_id, None)
- if dl is None:
- await sendMessage(message, "This is not an active task!")
- return
+ if dl is None:
+ return await send_message(message, "This is not an active task!")
elif len(msg) == 1:
- msg = ("Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n"
- + "This command mainly for selection incase you decided to select files from already added torrent. "
- + "But you can always use /cmd with arg `s` to select files before download start.")
- await sendMessage(message, msg)
+ await send_message(
+ message,
+ (
+ "Reply to an active /cmd which was used to start the qb-download or add gid along with cmd\n\n"
+ "This command mainly for selection incase you decided to select files from already added torrent. "
+ "But you can always use /cmd with arg `s` to select files before download start."
+ ),
+ )
return
- if OWNER_ID != user_id and dl.message.from_user.id != user_id and \
- (user_id not in user_data or not user_data[user_id].get('is_sudo')):
- await sendMessage(message, "This task is not for you!")
+ if (
+ OWNER_ID != user_id
+ and dl.message.from_user.id != user_id
+ and (user_id not in user_data or not user_data[user_id].get("is_sudo"))
+ ):
+ await send_message(message, "This task is not for you!")
return
- if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
- await sendMessage(message, 'Task should be in download or pause (incase message deleted by wrong) or queued (status incase you used torrent file)!')
+
+ if dl.status() not in [MirrorStatus.STATUS_DOWNLOADING, MirrorStatus.STATUS_PAUSED, MirrorStatus.STATUS_QUEUEDL]:
+ await send_message(
+ message,
+ "Task should be in download or pause (incase message deleted by wrong) or queued (status incase you used torrent file)!",
+ )
return
- if dl.name().startswith('[METADATA]'):
- await sendMessage(message, 'Try after downloading metadata finished!')
+
+ if dl.name().startswith("[METADATA]"):
+ await send_message(message, "Try after downloading metadata finished!")
return
try:
- listener = dl.listener()
- if listener.isQbit:
+ listener = dl.listener()
+ if listener.isQbit:
id_ = dl.hash()
client = dl.client()
if not dl.queued:
@@ -56,79 +76,107 @@ async def select(client, message):
else:
id_ = dl.gid()
if not dl.queued:
- try:
- await sync_to_async(aria2.client.force_pause, id_)
- except Exception as e:
- LOGGER.error(
- f"{e} Error in pause, this mostly happens after abuse aria2")
- listener.select = True
- except:
- await sendMessage(message, "This is not a bittorrent task!")
+ await sync_to_async(aria2.client.force_pause, id_)
+
+ listener.select = True
+ except Exception as e: # noqa
+ await send_message(message, "This is not a bittorrent task!")
return
- SBUTTONS = bt_selection_buttons(id_)
+ buttons = bt_selection_buttons(id_)
msg = "Your download paused. Choose files then press Done Selecting button to resume downloading."
- await sendMessage(message, msg, SBUTTONS)
+ await send_message(message, msg, buttons)
async def get_confirm(client, query):
user_id = query.from_user.id
data = query.data.split()
message = query.message
- dl = await getDownloadByGid(data[2])
+ dl = await get_download_by_gid(data[2])
if dl is None:
await query.answer("This task has been cancelled!", show_alert=True)
- await deleteMessage(message)
+ await delete_message(message)
return
- if hasattr(dl, 'listener'):
+
+ if hasattr(dl, "listener"):
listener = dl.listener()
else:
- await query.answer("Not in download state anymore! Keep this message to resume the seed if seed enabled!", show_alert=True)
+ await query.answer(
+ "Not in download state anymore! Keep this message to resume the seed if seed enabled!",
+ show_alert=True,
+ )
return
- if user_id != listener.message.from_user.id and not await CustomFilters.sudo(client, query):
+
+ if user_id != listener.message.from_user.id and not await CustomFilters.sudo(client, query):
await query.answer("This task is not for you!", show_alert=True)
elif data[1] == "pin":
await query.answer(data[3], show_alert=True)
elif data[1] == "done":
await query.answer()
+
id_ = data[3]
if len(id_) > 20:
client = dl.client()
tor_info = (await sync_to_async(client.torrents_info, torrent_hash=id_))[0]
- path = tor_info.content_path.rsplit('/', 1)[0]
+ path = tor_info.content_path.rsplit("/", 1)[0]
res = await sync_to_async(client.torrents_files, torrent_hash=id_)
- for f in res:
- if f.priority == 0:
- f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"]
- for f_path in f_paths:
- if await aiopath.exists(f_path):
- try:
- await aioremove(f_path)
- except:
- pass
+
+ # Remove files the user deselected (priority 0), including qBittorrent's partial ".!qB" files.
+ for f in res:
+ if f.priority == 0:
+ for f_path in (f"{path}/{f.name}", f"{path}/{f.name}.!qB"):
+ if await aiopath.exists(f_path):
+ try:
+ await aioremove(f_path)
+ except OSError:
+ pass
+
if not dl.queued:
await sync_to_async(client.torrents_resume, torrent_hashes=id_)
else:
- res = await sync_to_async(aria2.client.get_files, id_)
- for f in res:
- if f['selected'] == 'false' and await aiopath.exists(f['path']):
- try:
- await aioremove(f['path'])
- except:
- pass
+ res = await sync_to_async(aria2.client.get_files, id_)
+
+ # Delete files that were deselected in aria2 and still exist on disk.
+ for f in res:
+ if f['selected'] == 'false' and await aiopath.exists(f['path']):
+ try:
+ await aioremove(f['path'])
+ except OSError:
+ pass
+
if not dl.queued:
try:
- await sync_to_async(aria2.client.unpause, id_)
- except Exception as e:
- LOGGER.error(f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!")
- await sendStatusMessage(message)
- await deleteMessage(message)
+ await sync_to_async(aria2.client.unpause, id_)
+ except Exception as e: # noqa
+ LOGGER.error(
+ f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!"
+ )
+
+ await send_status_message(message)
+ await delete_message(message)
elif data[1] == "rm":
await query.answer()
await (dl.download()).cancel_download()
- await deleteMessage(message)
+ await delete_message(message)
-bot.add_handler(MessageHandler(select, filters=regex(
- f"^/{BotCommands.BtSelectCommand}(_\w+)?") & CustomFilters.authorized & ~CustomFilters.blacklisted))
-bot.add_handler(CallbackQueryHandler(get_confirm, filters=regex("^btsel")))
+bot.add_handler(
+ MessageHandler(
+ select,
+ filters=pyrogram.filters.regex(f"^/{BotCommands.BtSelectCommand}(_\w+)?")
+ & CustomFilters.authorized
+ & ~CustomFilters.blacklisted,
+ )
+)
+bot.add_handler(CallbackQueryHandler(get_confirm, filters=pyrogram.filters.regex("^btsel")))
diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py
index e332e223e8..8b526e1482 100644
--- a/bot/modules/users_settings.py
+++ b/bot/modules/users_settings.py
@@ -117,7 +117,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
ddl_serv = len(val) if (val := user_dict.get('ddl_servers', False)) else 0
buttons.ibutton("DDL Servers", f"userset {user_id} ddl_servers")
- tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+ tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
if not config_dict['USER_TD_MODE']:
tds_mode = "Force Disabled"
@@ -225,7 +225,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
buttons.ibutton('Disable DDL' if ddl_mode == 'Enabled' else 'Enable DDL', f"userset {user_id} s{key}", "header")
elif key == 'user_tds':
set_exist = len(val) if (val:=user_dict.get(key, False)) else 'Not Exists'
- tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+ tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
buttons.ibutton('Disable UserTDs' if tds_mode == 'Enabled' else 'Enable UserTDs', f"userset {user_id} td_mode", "header")
if not config_dict['USER_TD_MODE']:
tds_mode = "Force Disabled"
diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py
index 402a65dc35..43e9020711 100644
--- a/bot/modules/ytdlp.py
+++ b/bot/modules/ytdlp.py
@@ -1,18 +1,22 @@
#!/usr/bin/env python3
+import os
+import asyncio
+import time
+from typing import Any, Dict, List, Optional, Union
+
+import aiohttp
+import aiofiles
+from yt_dlp import YoutubeDL
from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.filters import command, regex, user
-from asyncio import sleep, wait_for, Event, wrap_future
-from aiohttp import ClientSession
-from aiofiles.os import path as aiopath
-from yt_dlp import YoutubeDL
-from functools import partial
-from time import time
+from pyrogram.types import Message, CallbackQuery, InlineKeyboardButton, InlineKeyboardMarkup
+from yt_dlp.utils import DownloadError
-from bot import DOWNLOAD_DIR, bot, categories_dict, config_dict, user_data, LOGGER
-from bot.helper.ext_utils.task_manager import task_utils
+from bot import DOWNLOAD_DIR, bot, categories_dict, config_dict, user_data, LOGGER
+from bot.helper.ext_utils.task_manager import task_utils
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, auto_delete_message, delete_links, open_category_btns, open_dump_btns
from bot.helper.telegram_helper.button_build import ButtonMaker
-from bot.helper.ext_utils.bot_utils import get_readable_file_size, fetch_user_tds, fetch_user_dumps, is_url, is_gdrive_link, new_task, sync_to_async, new_task, is_rclone_path, new_thread, get_readable_time, arg_parser
+from bot.helper.ext_utils.bot_utils import get_readable_file_size, fetch_user_tds, fetch_user_dumps, is_url, is_gdrive_link, new_task, is_rclone_path, new_thread, get_readable_time, arg_parser
from bot.helper.mirror_utils.download_utils.yt_dlp_download import YoutubeDLHelper
from bot.helper.mirror_utils.rclone_utils.list import RcloneList
from bot.helper.telegram_helper.bot_commands import BotCommands
@@ -22,504 +26,40 @@
from bot.helper.ext_utils.help_messages import YT_HELP_MESSAGE
from bot.helper.ext_utils.bulk_links import extract_bulk_links
+# Default yt-dlp options for this module. Only documented YoutubeDL parameters are kept
+# here; the exact set (and values such as outtmpl) is an assumption and may need trimming.
+YTDL_OPTIONS = {
+ "outtmpl": "{filetitle}.%(ext)s",
+ "default_search": "auto",
+ "nocheckcertificate": True,
+ "forcejson": True,
+ "dump_single_json": True,
+ "prefer_ffmpeg": True,
+ "geo_bypass": True,
+ "geo_bypass_country": "US",
+ "writeinfojson": True,
+ "ignoreerrors": True,
+ "no_warnings": True,
+ "postprocessors": [],
+ "simulate": True,
+ "no_color": True,
+ "nopart": True,
+ "updatetime": False,
+ "noplaylist": False,
+ "noprogress": True,
+ "quiet": True,
+ "ratelimit": 0,
+ "logtostderr": False,
+}
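+
+# A minimal sketch of how these defaults are expected to be consumed (the real download path
+# lives in YoutubeDLHelper; `link` below is illustrative only):
+#
+#     with YoutubeDL(YTDL_OPTIONS) as ydl:
+#         info = ydl.extract_info(link, download=False)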
-@new_task
-async def select_format(_, query, obj):
- data = query.data.split()
- message = query.message
- await query.answer()
- if data[1] == 'dict':
- b_name = data[2]
- await obj.qual_subbuttons(b_name)
- elif data[1] == 'mp3':
- await obj.mp3_subbuttons()
- elif data[1] == 'audio':
- await obj.audio_format()
- elif data[1] == 'aq':
- if data[2] == 'back':
- await obj.audio_format()
- else:
- await obj.audio_quality(data[2])
- elif data[1] == 'back':
- await obj.back_to_main()
- elif data[1] == 'cancel':
- await editMessage(message, 'Task has been cancelled.')
- obj.qual = None
- obj.is_cancelled = True
- obj.event.set()
- else:
- if data[1] == 'sub':
- obj.qual = obj.formats[data[2]][data[3]][1]
- elif '|' in data[1]:
- obj.qual = obj.formats[data[1]]
- else:
- obj.qual = data[1]
- obj.event.set()
-
-
-class YtSelection:
- def __init__(self, client, message):
- self.__message = message
- self.__user_id = message.from_user.id
- self.__client = client
- self.__is_m4a = False
- self.__reply_to = None
- self.__time = time()
- self.__timeout = 120
- self.__is_playlist = False
- self.is_cancelled = False
- self.__main_buttons = None
- self.event = Event()
- self.formats = {}
- self.qual = None
-
- @new_thread
- async def __event_handler(self):
- pfunc = partial(select_format, obj=self)
- handler = self.__client.add_handler(CallbackQueryHandler(
- pfunc, filters=regex('^ytq') & user(self.__user_id)), group=-1)
- try:
- await wait_for(self.event.wait(), timeout=self.__timeout)
- except:
- await editMessage(self.__reply_to, 'Timed Out. Task has been cancelled!')
- self.qual = None
- self.is_cancelled = True
- self.event.set()
- finally:
- self.__client.remove_handler(*handler)
-
- async def get_quality(self, result):
- future = self.__event_handler()
- buttons = ButtonMaker()
- if 'entries' in result:
- self.__is_playlist = True
- for i in ['144', '240', '360', '480', '720', '1080', '1440', '2160']:
- video_format = f'bv*[height<=?{i}][ext=mp4]+ba[ext=m4a]/b[height<=?{i}]'
- b_data = f'{i}|mp4'
- self.formats[b_data] = video_format
- buttons.ibutton(f'{i}-mp4', f'ytq {b_data}')
- video_format = f'bv*[height<=?{i}][ext=webm]+ba/b[height<=?{i}]'
- b_data = f'{i}|webm'
- self.formats[b_data] = video_format
- buttons.ibutton(f'{i}-webm', f'ytq {b_data}')
- buttons.ibutton('MP3', 'ytq mp3')
- buttons.ibutton('Audio Formats', 'ytq audio')
- buttons.ibutton('Best Videos', 'ytq bv*+ba/b')
- buttons.ibutton('Best Audios', 'ytq ba/b')
- buttons.ibutton('Cancel', 'ytq cancel', 'footer')
- self.__main_buttons = buttons.build_menu(3)
- msg = f'Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- else:
- format_dict = result.get('formats')
- if format_dict is not None:
- for item in format_dict:
- if item.get('tbr'):
- format_id = item['format_id']
-
- if item.get('filesize'):
- size = item['filesize']
- elif item.get('filesize_approx'):
- size = item['filesize_approx']
- else:
- size = 0
-
- if item.get('video_ext') == 'none' and item.get('acodec') != 'none':
- if item.get('audio_ext') == 'm4a':
- self.__is_m4a = True
- b_name = f"{item['acodec']}-{item['ext']}"
- v_format = f'ba[format_id={format_id}]'
- elif item.get('height'):
- height = item['height']
- ext = item['ext']
- fps = item['fps'] if item.get('fps') else ''
- b_name = f'{height}p{fps}-{ext}'
- ba_ext = '[ext=m4a]' if self.__is_m4a and ext == 'mp4' else ''
- v_format = f'bv*[format_id={format_id}]+ba{ba_ext}/b[height=?{height}]'
- else:
- continue
-
- self.formats.setdefault(b_name, {})[f"{item['tbr']}"] = [
- size, v_format]
-
- for b_name, tbr_dict in self.formats.items():
- if len(tbr_dict) == 1:
- tbr, v_list = next(iter(tbr_dict.items()))
- buttonName = f'{b_name} ({get_readable_file_size(v_list[0])})'
- buttons.ibutton(buttonName, f'ytq sub {b_name} {tbr}')
- else:
- buttons.ibutton(b_name, f'ytq dict {b_name}')
- buttons.ibutton('MP3', 'ytq mp3')
- buttons.ibutton('Audio Formats', 'ytq audio')
- buttons.ibutton('Best Video', 'ytq bv*+ba/b')
- buttons.ibutton('Best Audio', 'ytq ba/b')
- buttons.ibutton('Cancel', 'ytq cancel', 'footer')
- self.__main_buttons = buttons.build_menu(2)
- msg = f'Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- self.__reply_to = await sendMessage(self.__message, msg, self.__main_buttons)
- await wrap_future(future)
- if not self.is_cancelled:
- await deleteMessage(self.__reply_to)
- return self.qual
-
- async def back_to_main(self):
- if self.__is_playlist:
- msg = f'Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- else:
- msg = f'Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- await editMessage(self.__reply_to, msg, self.__main_buttons)
-
- async def qual_subbuttons(self, b_name):
- buttons = ButtonMaker()
- tbr_dict = self.formats[b_name]
- for tbr, d_data in tbr_dict.items():
- button_name = f'{tbr}K ({get_readable_file_size(d_data[0])})'
- buttons.ibutton(button_name, f'ytq sub {b_name} {tbr}')
- buttons.ibutton('Back', 'ytq back', 'footer')
- buttons.ibutton('Cancel', 'ytq cancel', 'footer')
- subbuttons = buttons.build_menu(2)
- msg = f'Choose Bit rate for {b_name}:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- await editMessage(self.__reply_to, msg, subbuttons)
-
- async def mp3_subbuttons(self):
- i = 's' if self.__is_playlist else ''
- buttons = ButtonMaker()
- audio_qualities = [64, 128, 320]
- for q in audio_qualities:
- audio_format = f'ba/b-mp3-{q}'
- buttons.ibutton(f'{q}K-mp3', f'ytq {audio_format}')
- buttons.ibutton('Back', 'ytq back')
- buttons.ibutton('Cancel', 'ytq cancel')
- subbuttons = buttons.build_menu(3)
- msg = f'Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- await editMessage(self.__reply_to, msg, subbuttons)
-
- async def audio_format(self):
- i = 's' if self.__is_playlist else ''
- buttons = ButtonMaker()
- for frmt in ['aac', 'alac', 'flac', 'm4a', 'opus', 'vorbis', 'wav']:
- audio_format = f'ba/b-{frmt}-'
- buttons.ibutton(frmt, f'ytq aq {audio_format}')
- buttons.ibutton('Back', 'ytq back', 'footer')
- buttons.ibutton('Cancel', 'ytq cancel', 'footer')
- subbuttons = buttons.build_menu(3)
- msg = f'Choose Audio{i} Format:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- await editMessage(self.__reply_to, msg, subbuttons)
-
- async def audio_quality(self, format):
- i = 's' if self.__is_playlist else ''
- buttons = ButtonMaker()
- for qual in range(11):
- audio_format = f'{format}{qual}'
- buttons.ibutton(qual, f'ytq {audio_format}')
- buttons.ibutton('Back', 'ytq aq back')
- buttons.ibutton('Cancel', 'ytq aq cancel')
- subbuttons = buttons.build_menu(5)
- msg = f'Choose Audio{i} Qaulity:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}'
- await editMessage(self.__reply_to, msg, subbuttons)
-
-
-def extract_info(link, options):
- with YoutubeDL(options) as ydl:
- result = ydl.extract_info(link, download=False)
- if result is None:
- raise ValueError('Info result is None')
- return result
-
-
-async def _mdisk(link, name):
- key = link.split('/')[-1]
- async with ClientSession() as session:
- async with session.get(f'https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}') as resp:
- if resp.status == 200:
- resp_json = await resp.json()
- link = resp_json['source']
- if not name:
- name = resp_json['filename']
- return name, link
-
-
-@new_task
-async def _ytdl(client, message, isLeech=False, sameDir=None, bulk=[]):
- text = message.text.split('\n')
- input_list = text[0].split(' ')
- qual = ''
- arg_base = {'link': '',
- '-i': 0,
- '-m': '', '-sd': '', '-samedir': '',
- '-s': False, '-select': False,
- '-opt': '', '-options': '',
- '-b': False, '-bulk': False,
- '-n': '', '-name': '',
- '-z': False, '-zip': False,
- '-up': '', '-upload': False,
- '-rcf': '',
- '-id': '',
- '-index': '',
- '-c': '', '-category': '',
- '-ud': '', '-dump': '',
- }
-
- args = arg_parser(input_list[1:], arg_base)
- cmd = input_list[0].split('@')[0]
-
- try:
- multi = int(args['-i'])
- except:
- multi = 0
-
- select = args['-s'] or args['-select']
- isBulk = args['-b'] or args['-bulk']
- opt = args['-opt'] or args['-options']
- folder_name = args['-m'] or args['-sd'] or args['-samedir']
- name = args['-n'] or args['-name']
- up = args['-up'] or args['-upload']
- rcf = args['-rcf']
- link = args['link']
- compress = args['-z'] or args['-zip'] or 'z' in cmd or 'zip' in cmd
- drive_id = args['-id']
- index_link = args['-index']
- gd_cat = args['-c'] or args['-category']
- user_dump = args['-ud'] or args['-dump']
- bulk_start = 0
- bulk_end = 0
-
-
- if not isinstance(isBulk, bool):
- dargs = isBulk.split(':')
- bulk_start = dargs[0] or None
- if len(dargs) == 2:
- bulk_end = dargs[1] or None
- isBulk = True
-
- if drive_id and is_gdrive_link(drive_id):
- drive_id = GoogleDriveHelper.getIdFromUrl(drive_id)
-
- if folder_name and not isBulk:
- folder_name = f'/{folder_name}'
- if sameDir is None:
- sameDir = {'total': multi, 'tasks': set(), 'name': folder_name}
- sameDir['tasks'].add(message.id)
-
- if isBulk:
- try:
- bulk = await extract_bulk_links(message, bulk_start, bulk_end)
- if len(bulk) == 0:
- raise ValueError('Bulk Empty!')
- except:
- await sendMessage(message, 'Reply to text file or tg message that have links seperated by new line!')
- return
- b_msg = input_list[:1]
- b_msg.append(f'{bulk[0]} -i {len(bulk)}')
- nextmsg = await sendMessage(message, " ".join(b_msg))
- nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id)
- nextmsg.from_user = message.from_user
- _ytdl(client, nextmsg, isLeech, sameDir, bulk)
- return
-
- if len(bulk) != 0:
- del bulk[0]
-
- @new_task
- async def __run_multi():
- if multi <= 1:
- return
- await sleep(5)
- if len(bulk) != 0:
- msg = input_list[:1]
- msg.append(f'{bulk[0]} -i {multi - 1}')
- nextmsg = await sendMessage(message, " ".join(msg))
- else:
- msg = [s.strip() for s in input_list]
- index = msg.index('-i')
- msg[index+1] = f"{multi - 1}"
- nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1)
- nextmsg = await sendMessage(nextmsg, " ".join(msg))
- nextmsg = await client.get_messages(chat_id=message.chat.id, message_ids=nextmsg.id)
- if folder_name:
- sameDir['tasks'].add(nextmsg.id)
- nextmsg.from_user = message.from_user
- await sleep(5)
- _ytdl(client, nextmsg, isLeech, sameDir, bulk)
-
- path = f'{DOWNLOAD_DIR}{message.id}{folder_name}'
-
- opt = opt or config_dict['YT_DLP_OPTIONS']
-
- if len(text) > 1 and text[1].startswith('Tag: '):
- tag, id_ = text[1].split('Tag: ')[1].split()
- message.from_user = await client.get_users(id_)
- try:
- await message.unpin()
- except:
- pass
- elif sender_chat := message.sender_chat:
- tag = sender_chat.title
- if username := message.from_user.username:
- tag = f'@{username}'
- else:
- tag = message.from_user.mention
-
- if not link and (reply_to := message.reply_to_message) and reply_to.text:
- link = reply_to.text.split('\n', 1)[0].strip()
-
- if not is_url(link):
- btn = ButtonMaker()
- btn.ibutton('Cʟɪᴄᴋ Hᴇʀᴇ Tᴏ Rᴇᴀᴅ Mᴏʀᴇ ...', f'wzmlx {message.from_user.id} help YT')
- await sendMessage(message, YT_HELP_MESSAGE[0], btn.build_menu(1))
- await delete_links(message)
- return
-
- error_msg = []
- error_button = None
- task_utilis_msg, error_button = await task_utils(message)
- if task_utilis_msg:
- error_msg.extend(task_utilis_msg)
-
- if error_msg:
- final_msg = f'Hey, {tag},\n'
- for __i, __msg in enumerate(error_msg, 1):
- final_msg += f'\n{__i}: {__msg}\n'
- if error_button is not None:
- error_button = error_button.build_menu(2)
- await sendMessage(message, final_msg, error_button)
- await delete_links(message)
- return
-
- if not isLeech:
- if config_dict['DEFAULT_UPLOAD'] == 'rc' and not up or up == 'rc':
- up = config_dict['RCLONE_PATH']
- elif config_dict['DEFAULT_UPLOAD'] == 'ddl' and not up or up == 'ddl':
- up = 'ddl'
- if not up and config_dict['DEFAULT_UPLOAD'] == 'gd':
- up = 'gd'
- user_tds = await fetch_user_tds(message.from_user.id)
- if not drive_id and gd_cat:
- merged_dict = {**categories_dict, **user_tds}
- for drive_name, drive_dict in merged_dict.items():
- if drive_name.casefold() == gd_cat.replace('_', ' ').casefold():
- drive_id, index_link = (drive_dict['drive_id'], drive_dict['index_link'])
- break
- if not drive_id and len(user_tds) == 1:
- drive_id, index_link = next(iter(user_tds.values())).values()
- elif not drive_id and (len(categories_dict) > 1 and len(user_tds) == 0 or len(categories_dict) >= 1 and len(user_tds) > 1):
- drive_id, index_link, is_cancelled = await open_category_btns(message)
- if is_cancelled:
- await delete_links(message)
- return
- if drive_id and not await sync_to_async(GoogleDriveHelper().getFolderData, drive_id):
- return await sendMessage(message, "Google Drive ID validation failed!!")
- if up == 'gd' and not config_dict['GDRIVE_ID'] and not drive_id:
- await sendMessage(message, 'GDRIVE_ID not Provided!')
- await delete_links(message)
- return
- elif not up:
- await sendMessage(message, 'No Rclone Destination!')
- await delete_links(message)
- return
- elif up not in ['rcl', 'gd', 'ddl']:
- if up.startswith('mrcc:'):
- config_path = f'rclone/{message.from_user.id}.conf'
- else:
- config_path = 'rclone.conf'
- if not await aiopath.exists(config_path):
- await sendMessage(message, f'Rclone Config: {config_path} not Exists!')
- await delete_links(message)
- return
- if up != 'gd' and up != 'ddl' and not is_rclone_path(up):
- await sendMessage(message, 'Wrong Rclone Upload Destination!')
- await delete_links(message)
- return
- else:
- if user_dump and (user_dump.isdigit() or user_dump.startswith('-')):
- up = int(user_dump)
- elif user_dump and user_dump.startswith('@'):
- up = user_dump
- elif (ldumps := await fetch_user_dumps(message.from_user.id)):
- if user_dump and user_dump.casefold() == "all":
- up = [dump_id for dump_id in ldumps.values()]
- elif user_dump:
- up = next((dump_id for name_, dump_id in ldumps.items() if user_dump.casefold() == name_.casefold()), '')
- if not up and len(ldumps) == 1:
- up = next(iter(ldumps.values()))
- elif not up:
- up, is_cancelled = await open_dump_btns(message)
- if is_cancelled:
- await delete_links(message)
- return
-
- if up == 'rcl' and not isLeech:
- up = await RcloneList(client, message).get_rclone_path('rcu')
- if not is_rclone_path(up):
- await sendMessage(message, up)
- await delete_links(message)
- return
-
- listener = MirrorLeechListener(message, compress, isLeech=isLeech, tag=tag, sameDir=sameDir, rcFlags=rcf, upPath=up, drive_id=drive_id, index_link=index_link, isYtdlp=True, source_url=link)
-
- if 'mdisk.me' in link:
- name, link = await _mdisk(link, name)
-
- options = {'usenetrc': True, 'cookiefile': 'cookies.txt'}
- if opt:
- yt_opt = opt.split('|')
- for ytopt in yt_opt:
- key, value = map(str.strip, ytopt.split(':', 1))
- if value.startswith('^'):
- if '.' in value or value == '^inf':
- value = float(value.split('^')[1])
- else:
- value = int(value.split('^')[1])
- elif value.lower() == 'true':
- value = True
- elif value.lower() == 'false':
- value = False
- elif value.startswith(('{', '[', '(')) and value.endswith(('}', ']', ')')):
- value = eval(value)
- options[key] = value
-
- options['playlist_items'] = '0'
-
- try:
- result = await sync_to_async(extract_info, link, options)
- except Exception as e:
- msg = str(e).replace('<', ' ').replace('>', ' ')
- await sendMessage(message, f'{tag} {msg}')
- __run_multi()
- await delete_links(message)
- return
-
- __run_multi()
-
- if not select:
- user_id = message.from_user.id
- user_dict = user_data.get(user_id, {})
- if 'format' in options:
- qual = options['format']
- elif user_dict.get('yt_opt'):
- qual = user_dict['yt_opt']
-
- if not qual:
- qual = await YtSelection(client, message).get_quality(result)
- if qual is None:
- return
- await delete_links(message)
- LOGGER.info(f'Downloading with YT-DLP: {link}')
- playlist = 'entries' in result
- ydl = YoutubeDLHelper(listener)
- await ydl.add_download(link, path, name, qual, playlist, opt)
-
-
-
-async def ytdl(client, message):
- _ytdl(client, message)
-
-
-async def ytdlleech(client, message):
- _ytdl(client, message, isLeech=True)
-
-
-bot.add_handler(MessageHandler(ytdl, filters=command(
- BotCommands.YtdlCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
-bot.add_handler(MessageHandler(ytdlleech, filters=command(
- BotCommands.YtdlLeechCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
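Reviewer note: the removed _ytdl handler parsed its -opt flag inline (see the deleted block above). For reference while reviewing the deletion, a minimal standalone sketch of that value coercion; the helper name parse_yt_opts is hypothetical, and ast.literal_eval is substituted for the original eval of dict/list literals:

    from ast import literal_eval

    def parse_yt_opts(opt: str) -> dict:
        # Hypothetical helper mirroring the deleted inline parser:
        # options arrive as 'key:value' pairs separated by '|'.
        options = {}
        for ytopt in opt.split('|'):
            key, value = map(str.strip, ytopt.split(':', 1))
            if value.startswith('^'):
                # '^' marks a number; a dot or '^inf' means float, otherwise int.
                value = float(value[1:]) if '.' in value or value == '^inf' else int(value[1:])
            elif value.lower() == 'true':
                value = True
            elif value.lower() == 'false':
                value = False
            elif value.startswith(('{', '[', '(')) and value.endswith(('}', ']', ')')):
                # The deleted code used eval(); literal_eval only accepts Python literals.
                value = literal_eval(value)
            options[key] = value
        return options

    # parse_yt_opts('format:bv*+ba|ratelimit:^1000000|writesubtitles:true')
    # -> {'format': 'bv*+ba', 'ratelimit': 1000000, 'writesubtitles': True}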
diff --git a/bot/version.py b/bot/version.py
index 8454c89de9..17bfce4819 100644
--- a/bot/version.py
+++ b/bot/version.py
@@ -1,16 +1,16 @@
#!/usr/bin/env python3
-def get_version() -> str:
+def get_version(MAJOR: str = '1', MINOR: str = '2', PATCH: str = '0', STATE: str = 'x') -> str:
'''
Returns the version details. Do not Interfere with this !
+ :param MAJOR: The major version number
+ :param MINOR: The minor version number
+ :param PATCH: The patch version number
+ :param STATE: The state of the release
:return: The version details in the format 'vMAJOR.MINOR.PATCH-STATE'
:rtype: str
'''
- MAJOR = '1'
- MINOR = '2'
- PATCH = '0'
- STATE = 'x'
return f"v{MAJOR}.{MINOR}.{PATCH}-{STATE}"
if __name__ == '__main__':
- print(get_version())
+    print(get_version())
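With the default values restored in the signature above, call sites that invoke get_version() with no arguments keep returning the previous string; a minimal sanity check, assuming bot.version can be imported on its own:

    from bot.version import get_version

    assert get_version() == 'v1.2.0-x'            # defaults match the old hard-coded values
    assert get_version(STATE='b') == 'v1.2.0-b'   # individual fields can still be overridden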
diff --git a/captain-definition b/captain-definition
index 0e14f8239a..7c3ec7422d 100644
--- a/captain-definition
+++ b/captain-definition
@@ -1,4 +1,4 @@
{
- "schemaVersion": 2,
- "dockerfilePath": "./Dockerfile"
+  "schemaVersion": 2,
+  "dockerfilePath": "./Dockerfile"
}
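captain-definition is read as JSON, and standard JSON has no comment syntax, so the keys above are kept to the schemaVersion/dockerfilePath pair the file already used. A quick local check that the file still parses and carries the expected keys, as a minimal sketch:

    import json

    # json.load raises JSONDecodeError if comments or trailing commas slip in.
    with open('captain-definition') as f:
        definition = json.load(f)

    assert definition['schemaVersion'] == 2
    assert 'dockerfilePath' in definition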
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
index db96f7f5f5..b6b3fda68f 100644
--- a/gen_sa_accounts.py
+++ b/gen_sa_accounts.py
@@ -9,358 +9,18 @@
from random import choice
from time import sleep
-from google.auth.transport.requests import Request
-from google_auth_oauthlib.flow import InstalledAppFlow
+import google.auth.exceptions
+import google.auth.transport.requests
+import google_auth_oauthlib.flow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
-SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
- 'https://www.googleapis.com/auth/iam']
-project_create_ops = []
-current_key_dump = []
-sleep_time = 30
+SCOPES = [
+ "https://www.googleapis.com/auth/drive",
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/iam",
+]
+ProjectServiceAccounts = list[dict[str, str]]
+Projects = list[str]
+ServiceAccountKeys = list[tuple[str, str]]
-
-# Create count SAs in project
-def _create_accounts(service, project, count):
- batch = service.new_batch_http_request(callback=_def_batch_resp)
- for _ in range(count):
- aid = _generate_id('mfc-')
- batch.add(service.projects().serviceAccounts().create(name='projects/' + project, body={'accountId': aid,
- 'serviceAccount': {
- 'displayName': aid}}))
- batch.execute()
-
-
-# Create accounts needed to fill project
-def _create_remaining_accounts(iam, project):
- print('Creating accounts in %s' % project)
- sa_count = len(_list_sas(iam, project))
- while sa_count != 100:
- _create_accounts(iam, project, 100 - sa_count)
- sa_count = len(_list_sas(iam, project))
-
-
-# Generate a random id
-def _generate_id(prefix='saf-'):
- chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
- return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
-
-
-# List projects using service
-def _get_projects(service):
- return [i['projectId'] for i in service.projects().list().execute()['projects']]
-
-
-# Default batch callback handler
-def _def_batch_resp(id, resp, exception):
- if exception is not None:
- if str(exception).startswith('