diff --git a/bot/__init__.py b/bot/__init__.py
index cfa201d6..b12de4b2 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -67,14 +67,6 @@ def get_config(name: str):
 # Value: telegram.Message
 status_reply_dict = {}
 
-try:
-    users = get_config('AUTHORIZED_CHATS')
-    users = users.split()
-    for user in users:
-        AUTHORIZED_CHATS.add(int(user.strip()))
-except:
-    pass
-
 try:
     BOT_TOKEN = get_config('BOT_TOKEN')
 except:
@@ -101,6 +93,14 @@ def get_config(name: str):
     LOGGER.error("DOWNLOAD_DIR env variable is missing")
     exit(1)
 
+try:
+    users = get_config('AUTHORIZED_CHATS')
+    users = users.split()
+    for user in users:
+        AUTHORIZED_CHATS.add(int(user.strip()))
+except:
+    pass
+
 try:
     DATABASE_URL = get_config('DATABASE_URL')
     if len(DATABASE_URL) == 0:
diff --git a/bot/__main__.py b/bot/__main__.py
index 9f4b70bd..78dae064 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -4,7 +4,6 @@
 from psutil import cpu_percent, cpu_count, disk_usage, virtual_memory, net_io_counters
 from sys import executable
 
-from telegram import InlineKeyboardMarkup
 from telegram.ext import CommandHandler
 
 from bot import bot, LOGGER, botStartTime, AUTHORIZED_CHATS, DEST_DRIVES, TELEGRAPH, Interval, dispatcher, updater
@@ -38,26 +37,16 @@ def ping(update, context):
     editMessage(f'{end_time - start_time}ms', reply)
 
 def stats(update, context):
-    uptime = get_readable_time(time.time() - botStartTime)
     total, used, free, disk = disk_usage('/')
-    total = get_readable_file_size(total)
-    used = get_readable_file_size(used)
-    free = get_readable_file_size(free)
-    sent = get_readable_file_size(net_io_counters().bytes_sent)
-    recv = get_readable_file_size(net_io_counters().bytes_recv)
-    cpu = cpu_percent(interval=0.5)
-    ram = virtual_memory().percent
-    p_core = cpu_count(logical=False)
-    l_core = cpu_count(logical=True)
     stats = '⚙️ SYSTEM STATISTICS' \
-            f'\n\nTotal Disk Space: {total}' \
-            f'\nUsed: {used} | Free: {free}' \
-            f'\n\nUpload: {sent}' \
-            f'\nDownload: {recv}' \
-            f'\n\nPhysical Cores: {p_core}' \
-            f'\nLogical Cores: {l_core}' \
-            f'\n\nCPU: {cpu}% | RAM: {ram}%' \
-            f'\nDISK: {disk}% | Uptime: {uptime}'
+            f'\n\nTotal Disk Space: {get_readable_file_size(total)}' \
+            f'\nUsed: {get_readable_file_size(used)} | Free: {get_readable_file_size(free)}' \
+            f'\n\nUpload: {get_readable_file_size(net_io_counters().bytes_sent)}' \
+            f'\nDownload: {get_readable_file_size(net_io_counters().bytes_recv)}' \
+            f'\n\nPhysical Cores: {cpu_count(logical=False)}' \
+            f'\nLogical Cores: {cpu_count(logical=True)}' \
+            f'\n\nCPU: {cpu_percent(interval=0.5)}% | RAM: {virtual_memory().percent}%' \
+            f'\nDISK: {disk}% | Uptime: {get_readable_time(time.time() - botStartTime)}'
     sendMessage(stats, context.bot, update.message)
 
 def log(update, context):
@@ -129,7 +118,11 @@ def restart(update, context):

• /{BotCommands.ShellCommand} <cmd>: Run commands in terminal

-• /{BotCommands.ExecHelpCommand}: Get help about executor
+• /{BotCommands.EvalCommand}: Evaluate Python expressions using eval() function
+

+• /{BotCommands.ExecCommand}: Execute Python code using exec() function
+

+• /{BotCommands.ClearLocalsCommand}: Clear locals of eval() and exec() functions

• /{BotCommands.LogCommand}: Get the log file

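Review note on the three new help entries above: the executor module behind /eval, /exec and /clearlocals is not part of this patch, so the following is only a minimal sketch, assuming python-telegram-bot v13-style handlers and owner-only filtering, of how such commands are commonly wired with one persistent namespace that /clearlocals resets. All names in the sketch are illustrative, not taken from the repository.

```python
# Illustrative sketch only; the real executor module is not shown in this diff.
locals_ns = {}  # persistent namespace shared by eval() and exec()

def evaluate(update, context):
    # /eval <expression> -> reply with the expression's value
    expression = update.message.text.split(' ', 1)[1]  # text after the command
    update.message.reply_text(str(eval(expression, globals(), locals_ns)))

def execute(update, context):
    # /exec <statements> -> run code; assignments persist in locals_ns
    code = update.message.text.split(' ', 1)[1]
    exec(code, globals(), locals_ns)

def clear_locals(update, context):
    # /clearlocals -> drop state accumulated by earlier eval/exec calls
    locals_ns.clear()
    update.message.reply_text('Locals cleared')
```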
@@ -146,14 +139,14 @@ def bot_help(update, context): button = ButtonMaker() button.build_button("User", f"https://graph.org/{help_user}") button.build_button("Admin", f"https://graph.org/{help_admin}") - sendMarkup(help_string, context.bot, update.message, InlineKeyboardMarkup(button.build_menu(2))) + sendMarkup(help_string, context.bot, update.message, button.build_menu(2)) def main(): start_cleanup() if os.path.isfile(".restartmsg"): with open(".restartmsg") as f: chat_id, msg_id = map(int, f) - bot.editMessageText("Restarted successfully", chat_id, msg_id, parse_mode='HTMl') + bot.editMessageText("Restarted successfully", chat_id, msg_id, parse_mode='HTML') os.remove(".restartmsg") start_handler = CommandHandler(BotCommands.StartCommand, start, run_async=True) diff --git a/bot/helper/download_utils/gd_downloader.py b/bot/helper/download_utils/gd_downloader.py index 6684f1e8..4eda100f 100644 --- a/bot/helper/download_utils/gd_downloader.py +++ b/bot/helper/download_utils/gd_downloader.py @@ -7,7 +7,7 @@ from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, sendStatusMessage from bot.helper.ext_utils.bot_utils import get_readable_file_size -def add_gd_download(link, listener, is_appdrive, appdict, is_gdtot): +def add_gd_download(link, path, listener, is_appdrive, appdict, is_gdtot): msg = sendMessage(f"Checking: {link}", listener.bot, listener.message) LOGGER.info(f"Checking: {link}") gd = GoogleDriveHelper() @@ -23,7 +23,7 @@ def add_gd_download(link, listener, is_appdrive, appdict, is_gdtot): msg2 += "\n\n⚠️ Task failed" return sendMessage(msg2, listener.bot, listener.message) LOGGER.info(f"Downloading: {name}") - drive = GoogleDriveHelper(name, listener) + drive = GoogleDriveHelper(name, path, size, listener) gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12)) download_status = DownloadStatus(drive, size, listener, gid) with download_dict_lock: diff --git a/bot/helper/drive_utils/gdriveTools.py b/bot/helper/drive_utils/gdriveTools.py index 956beef2..e1b9d03f 100644 --- a/bot/helper/drive_utils/gdriveTools.py +++ b/bot/helper/drive_utils/gdriveTools.py @@ -11,7 +11,6 @@ from tenacity import retry, wait_exponential, stop_after_attempt, \ retry_if_exception_type, RetryError -from telegram import InlineKeyboardMarkup from telegraph.exceptions import RetryAfterError from google.auth.transport.requests import Request @@ -21,53 +20,51 @@ from googleapiclient.errors import Error as GCError, HttpError from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload -from bot import LOGGER, DOWNLOAD_DIR, DRIVE_NAMES, DRIVE_IDS, INDEX_URLS, PARENT_ID, \ +from bot import LOGGER, DRIVE_NAMES, DRIVE_IDS, INDEX_URLS, PARENT_ID, \ IS_TEAM_DRIVE, TELEGRAPH, USE_SERVICE_ACCOUNTS, INDEX_URL, DEST_DRIVES from bot.helper.ext_utils.bot_utils import SetInterval, get_readable_file_size -from bot.helper.ext_utils.fs_utils import get_mime_type, get_path_size +from bot.helper.ext_utils.fs_utils import get_mime_type from bot.helper.telegram_helper.button_builder import ButtonMaker logging.getLogger('googleapiclient.discovery').setLevel(logging.ERROR) if USE_SERVICE_ACCOUNTS: - SERVICE_ACCOUNT_INDEX = randrange(len(os.listdir("accounts"))) + SERVICE_ACCOUNTS_NUMBER = len(os.listdir("accounts")) TELEGRAPH_LIMIT = 60 class GoogleDriveHelper: - def __init__(self, name=None, listener=None): - # Check https://developers.google.com/drive/scopes for all available scopes + + def __init__(self, name=None, path=None, size=0, listener=None): 
self.__OAUTH_SCOPE = ['https://www.googleapis.com/auth/drive'] self.__G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder" self.__G_DRIVE_BASE_DOWNLOAD_URL = "https://drive.google.com/uc?id={}&export=download" self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL = "https://drive.google.com/drive/folders/{}" + self.__path = path + self.__size = size self.__listener = listener - self.__service = self.authorize() - self._file_uploaded_bytes = 0 - self._file_downloaded_bytes = 0 - self.uploaded_bytes = 0 - self.downloaded_bytes = 0 - self.start_time = 0 - self.total_time = 0 - self.dtotal_time = 0 - self.is_uploading = False - self.is_downloading = False - self.is_cloning = False - self.is_cancelled = False - self.is_errored = False - self.status = None - self.dstatus = None - self.updater = None + self.__service = self.__authorize() + self.__total_bytes = 0 + self.__total_folders = 0 + self.__total_files = 0 + self.__sa_count = 0 + self.__start_time = 0 + self.__total_time = 0 + self.__alt_auth = False + self.__is_uploading = False + self.__is_downloading = False + self.__is_cloning = False + self.__is_cancelled = False + self.__is_errored = False + self.__status = None + self.__updater = None + self.__update_interval = 3 + self._file_processed_bytes = 0 self.name = name - self.update_interval = 3 - self.total_bytes = 0 - self.total_files = 0 - self.total_folders = 0 + self.processed_bytes = 0 self.transferred_size = 0 - self.sa_count = 0 - self.alt_auth = False self.response = {} - self.path = [] + self.telegraph_path = [] self.telegraph_content = [] self.title = "SearchX" self.author_name = "Levi" @@ -75,21 +72,11 @@ def __init__(self, name=None, listener=None): def speed(self): """ - It calculates the average upload speed and returns it in bytes/seconds unit - :return: Upload speed in bytes/second - """ - try: - return self.uploaded_bytes / self.total_time - except: - return 0 - - def dspeed(self): - """ - It calculates the average download speed and returns it in bytes/seconds unit - :return: Download speed in bytes/second + It calculates the average Upload or Download speed and returns it in bytes/seconds unit + :return: Upload or Download speed in bytes/second """ try: - return self.downloaded_bytes / self.dtotal_time + return self.processed_bytes / self.__total_time except: return 0 @@ -99,29 +86,29 @@ def cspeed(self): :return: Clone speed in bytes/second """ try: - return self.transferred_size / int(time.time() - self.start_time) + return self.transferred_size / int(time.time() - self.__start_time) except: return 0 - def authorize(self): + def __authorize(self): creds = None - if not USE_SERVICE_ACCOUNTS: - if os.path.exists('token.json'): - creds = Credentials.from_authorized_user_file('token.json', self.__OAUTH_SCOPE) - if creds and creds.expired and creds.refresh_token: - creds.refresh(Request()) - else: - LOGGER.error("The token.json file is missing") - else: - LOGGER.info(f"Authorizing with {SERVICE_ACCOUNT_INDEX}.json file") + if USE_SERVICE_ACCOUNTS: + self.__service_account_index = randrange(SERVICE_ACCOUNTS_NUMBER) + LOGGER.info(f"Authorizing with {self.__service_account_index}.json file") creds = service_account.Credentials.from_service_account_file( - f'accounts/{SERVICE_ACCOUNT_INDEX}.json', scopes=self.__OAUTH_SCOPE) + f'accounts/{self.__service_account_index}.json', scopes=self.__OAUTH_SCOPE) + elif os.path.exists('token.json'): + creds = Credentials.from_authorized_user_file('token.json', self.__OAUTH_SCOPE) + if creds and creds.expired and creds.refresh_token: + 
creds.refresh(Request()) + else: + LOGGER.error("The token.json file is missing") return build('drive', 'v3', credentials=creds, cache_discovery=False) - def alt_authorize(self): + def __alt_authorize(self): creds = None - if USE_SERVICE_ACCOUNTS and not self.alt_auth: - self.alt_auth = True + if USE_SERVICE_ACCOUNTS and not self.__alt_auth: + self.__alt_auth = True if os.path.exists('token.json'): LOGGER.info("Authorizing with token.json file") creds = Credentials.from_authorized_user_file('token.json', self.__OAUTH_SCOPE) @@ -132,8 +119,17 @@ def alt_authorize(self): LOGGER.error("The token.json file is missing") return None + def __switchServiceAccount(self): + if self.__service_account_index == SERVICE_ACCOUNTS_NUMBER - 1: + self.__service_account_index = 0 + else: + self.__service_account_index += 1 + self.__sa_count += 1 + LOGGER.info(f"Authorizing with {self.__service_account_index}.json file") + self.__service = self.__authorize() + @staticmethod - def getIdFromUrl(link: str): + def __getIdFromUrl(link: str): if "folders" in link or "file" in link: regex = r'https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)' res = re.search(regex, link) @@ -143,9 +139,97 @@ def getIdFromUrl(link: str): parsed = urlparse(link) return parse_qs(parsed.query)['id'][0] + @retry(wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(GCError)) + def __getFileMetadata(self, file_id): + return self.__service.files().get( + supportsAllDrives=True, + fileId=file_id, + fields='name, id, mimeType, size').execute() + + @retry(wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(GCError)) + def __getFilesByFolderId(self, folder_id): + page_token = None + query = f"'{folder_id}' in parents and trashed = false" + files = [] + while True: + response = self.__service.files().list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + q=query, + spaces='drive', + pageSize=200, + fields='nextPageToken, files(id, name, mimeType, size, shortcutDetails)', + orderBy='folder, name', + pageToken=page_token).execute() + files.extend(response.get('files', [])) + page_token = response.get('nextPageToken', None) + if page_token is None: + break + return files + + def __gDrive_file(self, filee): + size = int(filee.get('size', 0)) + self.__total_bytes += size + + def __gDrive_directory(self, drive_folder): + files = self.__getFilesByFolderId(drive_folder['id']) + if len(files) == 0: + return + for filee in files: + shortcut_details = filee.get('shortcutDetails') + if shortcut_details is not None: + mime_type = shortcut_details['targetMimeType'] + file_id = shortcut_details['targetId'] + filee = self.__getFileMetadata(file_id) + else: + mime_type = filee.get('mimeType') + if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: + self.__total_folders += 1 + self.__gDrive_directory(filee) + else: + self.__total_files += 1 + self.__gDrive_file(filee) + + def helper(self, link): + try: + file_id = self.__getIdFromUrl(link) + except (KeyError, IndexError): + msg = "Drive ID not found" + LOGGER.error(msg) + return msg, "", "", "" + try: + meta = self.__getFileMetadata(file_id) + name = meta.get('name') + if meta.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE: + self.__gDrive_directory(meta) + else: + self.__total_files += 1 + self.__gDrive_file(meta) + size = self.__total_bytes + files = self.__total_files + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total 
attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace('>', '').replace('<', '') + if "File not found" in err: + token_service = self.__alt_authorize() + if token_service is not None: + self.__service = token_service + return self.helper(link) + msg = "File not found" + else: + msg = str(err) + return msg, "", "", "" + return "", size, name, files + def deleteFile(self, link: str): try: - file_id = self.getIdFromUrl(link) + file_id = self.__getIdFromUrl(link) except (KeyError, IndexError): msg = "Drive ID not found" LOGGER.error(msg) @@ -160,7 +244,7 @@ def deleteFile(self, link: str): if "File not found" in str(err): msg = "File not found" elif "insufficientFilePermissions" in str(err): - token_service = self.alt_authorize() + token_service = self.__alt_authorize() if token_service is not None: self.__service = token_service return self.deleteFile(link) @@ -170,16 +254,6 @@ def deleteFile(self, link: str): LOGGER.error(msg) return msg - def switchServiceAccount(self): - global SERVICE_ACCOUNT_INDEX - service_account_count = len(os.listdir("accounts")) - if SERVICE_ACCOUNT_INDEX == service_account_count - 1: - SERVICE_ACCOUNT_INDEX = 0 - self.sa_count += 1 - SERVICE_ACCOUNT_INDEX += 1 - LOGGER.info(f"Authorizing with {SERVICE_ACCOUNT_INDEX}.json file") - self.__service = self.authorize() - def __set_permission_public(self, file_id): permissions = { 'type': 'anyone', @@ -204,7 +278,7 @@ def __set_permission_email(self, file_id, email): def setPermission(self, link, access): try: - file_id = self.getIdFromUrl(link) + file_id = self.__getIdFromUrl(link) except (KeyError, IndexError): msg = "Drive ID not found" LOGGER.error(msg) @@ -223,7 +297,7 @@ def setPermission(self, link, access): if "File not found" in str(err): msg = "File not found" elif "insufficientFilePermissions" in str(err): - token_service = self.alt_authorize() + token_service = self.__alt_authorize() if token_service is not None: self.__service = token_service return self.setPermission(link, access) @@ -235,7 +309,25 @@ def setPermission(self, link, access): @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(3), retry=retry_if_exception_type(GCError)) - def copyFile(self, file_id, dest_id): + def __create_directory(self, directory_name, parent_id): + file_metadata = { + "name": directory_name, + "mimeType": self.__G_DRIVE_DIR_MIME_TYPE + } + if parent_id is not None: + file_metadata["parents"] = [parent_id] + file = self.__service.files().create( + supportsAllDrives=True, + body=file_metadata).execute() + file_id = file.get("id") + if not IS_TEAM_DRIVE: + self.__set_permission_public(file_id) + return file_id + + @retry(wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(GCError)) + def __copyFile(self, file_id, dest_id): body = { 'parents': [dest_id] } @@ -250,60 +342,45 @@ def copyFile(self, file_id, dest_id): reason = json.loads(err.content).get('error').get('errors')[0].get('reason') if reason in ['userRateLimitExceeded', 'dailyLimitExceeded']: if USE_SERVICE_ACCOUNTS: - if self.sa_count == len(os.listdir("accounts")) or self.sa_count > 50: - self.is_cancelled = True + if self.__sa_count == SERVICE_ACCOUNTS_NUMBER: + self.__is_cancelled = True raise err else: - self.switchServiceAccount() - return self.copyFile(file_id, dest_id) + self.__switchServiceAccount() + return self.__copyFile(file_id, dest_id) else: - self.is_cancelled = True + self.__is_cancelled = True 
LOGGER.info(f"Warning: {reason}") raise err else: raise err - @retry(wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(GCError)) - def getFileMetadata(self, file_id): - return self.__service.files().get( - supportsAllDrives=True, - fileId=file_id, - fields='name, id, mimeType, size').execute() - - @retry(wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(GCError)) - def getFilesByFolderId(self, folder_id): - page_token = None - query = f"'{folder_id}' in parents and trashed = false" - files = [] - while True: - response = self.__service.files().list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - q=query, - spaces='drive', - pageSize=200, - fields='nextPageToken, files(id, name, mimeType, size)', - pageToken=page_token).execute() - for file in response.get('files', []): - files.append(file) - page_token = response.get('nextPageToken', None) - if page_token is None: + def __cloneFolder(self, name, local_path, folder_id, parent_id): + files = self.__getFilesByFolderId(folder_id) + if len(files) == 0: + return parent_id + for file in files: + if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE: + self.__total_folders += 1 + file_path = os.path.join(local_path, file.get('name')) + current_dir_id = self.__create_directory(file.get('name'), parent_id) + self.__cloneFolder(file.get('name'), file_path, file.get('id'), current_dir_id) + else: + self.__total_files += 1 + self.transferred_size += int(file.get('size', 0)) + self.__copyFile(file.get('id'), parent_id) + if self.__is_cancelled: break - return files def clone(self, link, key): - self.is_cloning = True - self.start_time = time.time() - self.total_files = 0 - self.total_folders = 0 + self.__is_cloning = True + self.__start_time = time.time() + self.__total_files = 0 + self.__total_folders = 0 parent_id = PARENT_ID index_url = INDEX_URL try: - file_id = self.getIdFromUrl(link) + file_id = self.__getIdFromUrl(link) except (KeyError, IndexError): msg = "Drive ID not found" LOGGER.error(msg) @@ -316,29 +393,29 @@ def clone(self, link, key): index_url = None msg = "" try: - meta = self.getFileMetadata(file_id) + meta = self.__getFileMetadata(file_id) name = meta.get("name") mime_type = meta.get("mimeType") if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - dir_id = self.create_directory(meta.get('name'), parent_id) - self.cloneFolder(meta.get('name'), meta.get('name'), meta.get('id'), dir_id) + dir_id = self.__create_directory(meta.get('name'), parent_id) + self.__cloneFolder(meta.get('name'), meta.get('name'), meta.get('id'), dir_id) durl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - if self.is_cancelled: + if self.__is_cancelled: LOGGER.info(f"Deleting cloned data from Drive") self.deleteFile(durl) return "The clone task has been cancelled" msg += f'Name: {name}' msg += f'\nSize: {get_readable_file_size(self.transferred_size)}' msg += f'\nType: Folder' - msg += f'\nSubFolders: {self.total_folders}' - msg += f'\nFiles: {self.total_files}' + msg += f'\nSubFolders: {self.__total_folders}' + msg += f'\nFiles: {self.__total_files}' msg += f'\n\nDrive Link' if index_url is not None: url_path = requests.utils.quote(f'{meta.get("name")}', safe='') url = f'{index_url}/{url_path}/' msg += f' | Index Link' else: - file = self.copyFile(meta.get('id'), parent_id) + file = self.__copyFile(meta.get('id'), parent_id) msg += f'Name: {file.get("name")}' if mime_type is None: mime_type = 'File' @@ -357,7 +434,7 @@ 
def clone(self, link, key): if "User rate limit exceeded" in err: msg = "User rate limit exceeded" elif "File not found" in err: - token_service = self.alt_authorize() + token_service = self.__alt_authorize() if token_service is not None: self.__service = token_service return self.clone(link, key) @@ -366,74 +443,39 @@ def clone(self, link, key): msg = str(err) return msg - def cloneFolder(self, name, local_path, folder_id, parent_id): - files = self.getFilesByFolderId(folder_id) - if len(files) == 0: - return parent_id - for file in files: - if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE: - self.total_folders += 1 - file_path = os.path.join(local_path, file.get('name')) - current_dir_id = self.create_directory(file.get('name'), parent_id) - self.cloneFolder(file.get('name'), file_path, file.get('id'), current_dir_id) - else: - self.total_files += 1 - self.transferred_size += int(file.get('size', 0)) - self.copyFile(file.get('id'), parent_id) - if self.is_cancelled: - break - - @retry(wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(GCError)) - def create_directory(self, directory_name, parent_id): - file_metadata = { - "name": directory_name, - "mimeType": self.__G_DRIVE_DIR_MIME_TYPE - } - if parent_id is not None: - file_metadata["parents"] = [parent_id] - file = self.__service.files().create( - supportsAllDrives=True, - body=file_metadata).execute() - file_id = file.get("id") - if not IS_TEAM_DRIVE: - self.__set_permission_public(file_id) - return file_id - def count(self, link): try: - file_id = self.getIdFromUrl(link) + file_id = self.__getIdFromUrl(link) except (KeyError, IndexError): msg = "Drive ID not found" LOGGER.error(msg) return msg msg = "" try: - meta = self.getFileMetadata(file_id) + meta = self.__getFileMetadata(file_id) mime_type = meta.get('mimeType') if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.gDrive_directory(meta) + self.__gDrive_directory(meta) msg += f'Name: {meta.get("name")}' - msg += f'\nSize: {get_readable_file_size(self.total_bytes)}' + msg += f'\nSize: {get_readable_file_size(self.__total_bytes)}' msg += f'\nType: Folder' - msg += f'\nSubFolders: {self.total_folders}' + msg += f'\nSubFolders: {self.__total_folders}' else: msg += f'Name: {meta.get("name")}' if mime_type is None: mime_type = 'File' - self.total_files += 1 - self.gDrive_file(meta) - msg += f'\nSize: {get_readable_file_size(self.total_bytes)}' + self.__total_files += 1 + self.__gDrive_file(meta) + msg += f'\nSize: {get_readable_file_size(self.__total_bytes)}' msg += f'\nType: {mime_type}' - msg += f'\nFiles: {self.total_files}' + msg += f'\nFiles: {self.__total_files}' except Exception as err: if isinstance(err, RetryError): LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}") err = err.last_attempt.exception() err = str(err).replace('>', '').replace('<', '') if "File not found" in err: - token_service = self.alt_authorize() + token_service = self.__alt_authorize() if token_service is not None: self.__service = token_service return self.count(link) @@ -442,69 +484,270 @@ def count(self, link): msg = str(err) return msg - def gDrive_file(self, filee): - size = int(filee.get('size', 0)) - self.total_bytes += size + def _progress(self): + if self.__status is not None: + chunk_size = self.__status.total_size * self.__status.progress() - self._file_processed_bytes + self._file_processed_bytes = self.__status.total_size * self.__status.progress() + self.processed_bytes += chunk_size + self.__total_time += 
self.__update_interval - def gDrive_directory(self, drive_folder): - files = self.getFilesByFolderId(drive_folder['id']) - if len(files) == 0: + @retry(wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(GCError) | retry_if_exception_type(IOError))) + def __upload_file(self, file_path, file_name, mime_type, parent_id): + file_metadata = { + 'name': file_name, + 'mimeType': mime_type + } + if parent_id is not None: + file_metadata['parents'] = [parent_id] + if os.path.getsize(file_path) == 0: + media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=False) + response = self.__service.files().create( + supportsAllDrives=True, + body=file_metadata, + media_body=media_body).execute() + if not IS_TEAM_DRIVE: + self.__set_permission_public(response['id']) + drive_file = self.__service.files().get( + supportsAllDrives=True, + fileId=response['id']).execute() + download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id')) + return download_url + media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=True, + chunksize=50 * 1024 * 1024) + drive_file = self.__service.files().create( + supportsAllDrives=True, + body=file_metadata, + media_body=media_body) + response = None + while response is None: + if self.__is_cancelled: + break + try: + self.__status, response = drive_file.next_chunk() + except HttpError as err: + if err.resp.get('content-type', '').startswith('application/json'): + reason = json.loads(err.content).get('error').get('errors')[0].get('reason') + if reason not in ['userRateLimitExceeded', 'dailyLimitExceeded']: + raise err + if USE_SERVICE_ACCOUNTS: + self.__switchServiceAccount() + return self.__upload_file(file_path, file_name, mime_type, parent_id) + else: + LOGGER.error(f"Warning: {reason}") + raise err + if self.__is_cancelled: return - for filee in files: - shortcut_details = filee.get('shortcutDetails') - if shortcut_details is not None: - mime_type = shortcut_details['targetMimeType'] - file_id = shortcut_details['targetId'] - filee = self.getFileMetadata(file_id) - else: - mime_type = filee.get('mimeType') - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.total_folders += 1 - self.gDrive_directory(filee) + self._file_processed_bytes = 0 + if not IS_TEAM_DRIVE: + self.__set_permission_public(response['id']) + drive_file = self.__service.files().get( + supportsAllDrives=True, + fileId=response['id']).execute() + download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id')) + return download_url + + def __upload_dir(self, input_directory, parent_id): + list_dirs = os.listdir(input_directory) + if len(list_dirs) == 0: + return parent_id + new_id = None + for item in list_dirs: + current_file_name = os.path.join(input_directory, item) + if os.path.isdir(current_file_name): + current_dir_id = self.__create_directory(item, parent_id) + new_id = self.__upload_dir(current_file_name, current_dir_id) + self.__total_folders += 1 else: - self.total_files += 1 - self.gDrive_file(filee) + mime_type = get_mime_type(current_file_name) + file_name = current_file_name.split("/")[-1] + # 'current_file_name' will have the full path + self.__upload_file(current_file_name, file_name, mime_type, parent_id) + self.__total_files += 1 + new_id = parent_id + if self.__is_cancelled: + break + return new_id - def helper(self, link): - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - msg = "Drive ID not found" - LOGGER.error(msg) - return msg, "", "", "" 
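Review note: the quota fail-over pattern that recurs in __copyFile, __upload_file and __download_file is worth isolating. On userRateLimitExceeded or dailyLimitExceeded the code calls __switchServiceAccount(), which steps round-robin through the keys in accounts/ and re-issues the request, and the task is cancelled once __sa_count reaches SERVICE_ACCOUNTS_NUMBER. A standalone sketch of that rotation follows; the class and method names here are illustrative, not from the repository.

```python
import os
from random import randrange

class AccountRotator:
    """Round-robin over N service-account keys, as the accounts/ layout implies."""

    def __init__(self, folder='accounts'):
        self.total = len(os.listdir(folder))   # SERVICE_ACCOUNTS_NUMBER equivalent
        self.index = randrange(self.total)     # start from a random key
        self.switches = 0                      # __sa_count equivalent

    def switch(self):
        # Wrap back to the first key after the last one.
        self.index = 0 if self.index == self.total - 1 else self.index + 1
        self.switches += 1
        # False once every key has been tried -> caller cancels the task.
        return self.switches < self.total
```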
+ def upload(self, file_name: str): + self.__is_uploading = True + file_path = f"{self.__path}/{file_name}" + size = get_readable_file_size(self.__size) + self.__updater = SetInterval(self.__update_interval, self._progress) try: - meta = self.getFileMetadata(file_id) - name = meta.get('name') - if meta.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE: - self.gDrive_directory(meta) + if os.path.isfile(file_path): + mime_type = get_mime_type(file_path) + link = self.__upload_file(file_path, file_name, mime_type, PARENT_ID) + if self.__is_cancelled: + return + if link is None: + raise Exception("The upload task has been manually cancelled") else: - self.total_files += 1 - self.gDrive_file(meta) - size = self.total_bytes - files = self.total_files + mime_type = 'Folder' + dir_id = self.__create_directory(os.path.basename(os.path.abspath(file_name)), PARENT_ID) + result = self.__upload_dir(file_path, dir_id) + if result is None: + raise Exception("The upload task has been manually cancelled") + link = f"https://drive.google.com/folderview?id={dir_id}" + if self.__is_cancelled: + return except Exception as err: if isinstance(err, RetryError): LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}") err = err.last_attempt.exception() - err = str(err).replace('>', '').replace('<', '') - if "File not found" in err: - token_service = self.alt_authorize() - if token_service is not None: - self.__service = token_service - return self.helper(link) - msg = "File not found" + self.__listener.onUploadError(str(err)) + self.__is_errored = True + finally: + self.__updater.cancel() + if self.__is_cancelled and not self.__is_errored: + if mime_type == 'Folder': + LOGGER.info("Deleting uploaded data from Drive") + link = f"https://drive.google.com/folderview?id={dir_id}" + self.deleteFile(link) + return + elif self.__is_errored: + return + self.__listener.onUploadComplete(link, size, self.__total_files, self.__total_folders, mime_type, self.name) + + @retry(wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(GCError) | retry_if_exception_type(IOError))) + def __download_file(self, file_id, path, filename, mime_type): + request = self.__service.files().get_media(fileId=file_id) + filename = filename.replace('/', '') + if len(filename.encode()) > 255: + ext = os.path.splitext(filename)[1] + filename = f"{filename[:245]}{ext}" + if self.name.endswith(ext): + self.name = filename + fh = FileIO(f"{path}/{filename}", 'wb') + downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024) + done = False + while not done: + if self.__is_cancelled: + fh.close() + break + try: + self.__status, done = downloader.next_chunk() + except HttpError as err: + if err.resp.get('content-type', '').startswith('application/json'): + reason = json.loads(err.content).get('error').get('errors')[0].get('reason') + if reason not in ['downloadQuotaExceeded', 'dailyLimitExceeded']: + raise err + if USE_SERVICE_ACCOUNTS: + if self.__sa_count == SERVICE_ACCOUNTS_NUMBER: + self.__is_cancelled = True + raise err + else: + self.__switchServiceAccount() + return self.__download_file(file_id, path, filename, mime_type) + else: + LOGGER.error(f"Warning: {reason}") + raise err + self._file_processed_bytes = 0 + + def __download_folder(self, folder_id, path, folder_name): + folder_name = folder_name.replace('/', '') + if not os.path.exists(f"{path}/{folder_name}"): + os.makedirs(f"{path}/{folder_name}") + path += f"/{folder_name}" + result = 
self.__getFilesByFolderId(folder_id) + if len(result) == 0: + return + result = sorted(result, key=lambda k: k['name']) + for item in result: + file_id = item['id'] + filename = item['name'] + shortcut_details = item.get('shortcutDetails') + if shortcut_details is not None: + file_id = shortcut_details['targetId'] + mime_type = shortcut_details['targetMimeType'] else: - msg = str(err) - return msg, "", "", "" - return "", size, name, files + mime_type = item.get('mimeType') + if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: + self.__download_folder(file_id, path, filename) + elif not os.path.isfile(f"{path}{filename}"): + self.__download_file(file_id, path, filename, mime_type) + if self.__is_cancelled: + break - def escapes(self, str_val): + def download(self, link): + self.__is_downloading = True + file_id = self.__getIdFromUrl(link) + self.__updater = SetInterval(self.__update_interval, self._progress) + try: + meta = self.__getFileMetadata(file_id) + if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: + self.__download_folder(file_id, self.__path, meta.get('name')) + else: + os.makedirs(self.__path) + self.__download_file(file_id, self.__path, meta.get('name'), meta.get('mimeType')) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace('>', '').replace('<', '') + if "downloadQuotaExceeded" in err: + err = "Download quota exceeded." + elif "File not found" in err: + token_service = self.__alt_authorize() + if token_service is not None: + self.__service = token_service + self.__updater.cancel() + return self.download(link) + self.__listener.onDownloadError(err) + self.__is_cancelled = True + finally: + self.__updater.cancel() + if self.__is_cancelled: + return + self.__listener.onDownloadComplete() + + def cancel_task(self): + self.__is_cancelled = True + if self.__is_downloading: + LOGGER.info(f"Cancelling download: {self.name}") + self.__listener.onDownloadError("The download task has been cancelled") + elif self.__is_cloning: + LOGGER.info(f"Cancelling clone: {self.name}") + elif self.__is_uploading: + LOGGER.info(f"Cancelling upload: {self.name}") + self.__listener.onUploadError("The upload task has been cancelled") + + def __escapes(self, str_val): chars = ['\\', "'", '"', r'\a', r'\b', r'\f', r'\n', r'\r', r'\t'] for char in chars: - str_val = str_val.replace(char, '\\' + char) + str_val = str_val.replace(char, f'\\{char}') return str_val - def receive_callback(self, request_id, response, exception): + def __create_page(self, acc, content): + try: + self.telegraph_path.append( + acc.create_page( + title=self.title, + author_name=self.author_name, + author_url=self.author_url, + html_content=content)['path']) + except RetryAfterError as e: + LOGGER.info(f"Cooldown: {e.retry_after} seconds") + time.sleep(e.retry_after) + self.__create_page(acc, content) + + def __edit_page(self, acc, content, path): + try: + acc.edit_page( + path=path, + title=self.title, + author_name=self.author_name, + author_url=self.author_url, + html_content=content) + except RetryAfterError as e: + LOGGER.info(f"Cooldown: {e.retry_after} seconds") + time.sleep(e.retry_after) + self.__edit_page(acc, content, path) + + def __receive_callback(self, request_id, response, exception): # request_id = order number of request = shared drive index (1 based) if exception is not None: exception = str(exception).replace('>', '').replace('<', '') @@ -513,8 +756,8 @@ def 
receive_callback(self, request_id, response, exception): if response['files']: self.response[request_id] = response - def drive_query(self, DRIVE_IDS, search_type, file_name): - batch = self.__service.new_batch_http_request(self.receive_callback) + def __drive_query(self, DRIVE_IDS, search_type, file_name): + batch = self.__service.new_batch_http_request(self.__receive_callback) query = f"name contains '{file_name}' and " if search_type is not None: if search_type == '-d': @@ -526,7 +769,7 @@ def drive_query(self, DRIVE_IDS, search_type, file_name): if parent_id == "root": batch.add( self.__service.files().list( - q=query + " and 'me' in owners", + q=f"{query} and 'me' in owners", pageSize=1000, spaces='drive', fields='files(id, name, mimeType, size)', @@ -546,7 +789,7 @@ def drive_query(self, DRIVE_IDS, search_type, file_name): batch.execute() def drive_list(self, file_name): - file_name = self.escapes(file_name) + file_name = self.__escapes(file_name) search_type = None if re.search("^-d ", file_name, re.IGNORECASE): search_type = '-d' @@ -560,10 +803,10 @@ def drive_list(self, file_name): response_count = 0 total_acc = len(TELEGRAPH) start_time = time.time() - token_service = self.alt_authorize() + token_service = self.__alt_authorize() if token_service is not None: self.__service = token_service - self.drive_query(DRIVE_IDS, search_type, file_name) + self.__drive_query(DRIVE_IDS, search_type, file_name) add_title_msg = True for files in self.response: index = int(files) - 1 @@ -606,293 +849,26 @@ def drive_list(self, file_name): if i != 0: # Add previous page link - self.telegraph_content[i] += f'Previous' \ + self.telegraph_content[i] += f'Previous' \ f' | Page {i+1}/{total_pages}' else: self.telegraph_content[i] += f'Page {i+1}/{total_pages}' - self.create_page( + self.__create_page( TELEGRAPH[acc_no], self.telegraph_content[i]) if i != 0: # Edit previous page to add next page link - self.telegraph_content[i-1] += f' | Next' + self.telegraph_content[i-1] += f' | Next' - self.edit_page( + self.__edit_page( TELEGRAPH[(acc_no - 1) if i % page_per_acc == 0 else acc_no], self.telegraph_content[i-1], - self.path[i-1]) + self.telegraph_path[i-1]) msg = f"Found {response_count} results matching '{file_name}' in {len(DRIVE_IDS)} Drives " \ f"(Time taken {round(time.time() - start_time, 2)}s)" button = ButtonMaker() - button.build_button("VIEW RESULTS 🗂️", f"https://graph.org/{self.path[0]}") - return msg, InlineKeyboardMarkup(button.build_menu(1)) - - def create_page(self, acc, content): - try: - self.path.append( - acc.create_page( - title=self.title, - author_name=self.author_name, - author_url=self.author_url, - html_content=content)['path']) - except RetryAfterError as e: - LOGGER.info(f"Cooldown: {e.retry_after} seconds") - time.sleep(e.retry_after) - self.create_page(acc, content) - - def edit_page(self, acc, content, path): - try: - acc.edit_page( - path=path, - title=self.title, - author_name=self.author_name, - author_url=self.author_url, - html_content=content) - except RetryAfterError as e: - LOGGER.info(f"Cooldown: {e.retry_after} seconds") - time.sleep(e.retry_after) - self.edit_page(acc, content, path) - - def upload(self, file_name: str): - self.is_downloading = False - self.is_uploading = True - file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}" - file_path = f"{file_dir}/{file_name}" - size = get_readable_file_size(get_path_size(file_path)) - self.updater = SetInterval(self.update_interval, self._on_upload_progress) - try: - if os.path.isfile(file_path): - 
mime_type = get_mime_type(file_path) - link = self.upload_file(file_path, file_name, mime_type, PARENT_ID) - if self.is_cancelled: - return - if link is None: - raise Exception("The upload task has been manually cancelled") - else: - mime_type = 'Folder' - dir_id = self.create_directory(os.path.basename(os.path.abspath(file_name)), PARENT_ID) - result = self.upload_dir(file_path, dir_id) - if result is None: - raise Exception("The upload task has been manually cancelled") - link = f"https://drive.google.com/folderview?id={dir_id}" - if self.is_cancelled: - return - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - self.__listener.onUploadError(str(err)) - self.is_errored = True - finally: - self.updater.cancel() - if self.is_cancelled and not self.is_errored: - if mime_type == 'Folder': - LOGGER.info("Deleting uploaded data from Drive") - link = f"https://drive.google.com/folderview?id={dir_id}" - self.deleteFile(link) - return - elif self.is_errored: - return - self.__listener.onUploadComplete(link, size, self.total_files, self.total_folders, mime_type, self.name) - - def upload_dir(self, input_directory, parent_id): - list_dirs = os.listdir(input_directory) - if len(list_dirs) == 0: - return parent_id - new_id = None - for item in list_dirs: - current_file_name = os.path.join(input_directory, item) - if os.path.isdir(current_file_name): - current_dir_id = self.create_directory(item, parent_id) - new_id = self.upload_dir(current_file_name, current_dir_id) - self.total_folders += 1 - else: - mime_type = get_mime_type(current_file_name) - file_name = current_file_name.split("/")[-1] - # 'current_file_name' will have the full path - self.upload_file(current_file_name, file_name, mime_type, parent_id) - self.total_files += 1 - new_id = parent_id - if self.is_cancelled: - break - return new_id - - @retry(wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(GCError) | retry_if_exception_type(IOError))) - def upload_file(self, file_path, file_name, mime_type, parent_id): - file_metadata = { - 'name': file_name, - 'mimeType': mime_type - } - if parent_id is not None: - file_metadata['parents'] = [parent_id] - if os.path.getsize(file_path) == 0: - media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=False) - response = self.__service.files().create( - supportsAllDrives=True, - body=file_metadata, - media_body=media_body).execute() - if not IS_TEAM_DRIVE: - self.__set_permission_public(response['id']) - drive_file = self.__service.files().get( - supportsAllDrives=True, - fileId=response['id']).execute() - download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id')) - return download_url - media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=True, - chunksize=50 * 1024 * 1024) - drive_file = self.__service.files().create( - supportsAllDrives=True, - body=file_metadata, - media_body=media_body) - response = None - while response is None: - if self.is_cancelled: - break - try: - self.status, response = drive_file.next_chunk() - except HttpError as err: - if err.resp.get('content-type', '').startswith('application/json'): - reason = json.loads(err.content).get('error').get('errors')[0].get('reason') - if reason not in ['userRateLimitExceeded', 'dailyLimitExceeded']: - raise err - if USE_SERVICE_ACCOUNTS: - self.switchServiceAccount() - return self.upload_file(file_path, 
file_name, mime_type, parent_id) - else: - LOGGER.error(f"Warning: {reason}") - raise err - if self.is_cancelled: - return - self._file_uploaded_bytes = 0 - if not IS_TEAM_DRIVE: - self.__set_permission_public(response['id']) - drive_file = self.__service.files().get( - supportsAllDrives=True, - fileId=response['id']).execute() - download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id')) - return download_url - - def _on_upload_progress(self): - if self.status is not None: - chunk_size = self.status.total_size * self.status.progress() - self._file_uploaded_bytes - self._file_uploaded_bytes = self.status.total_size * self.status.progress() - self.uploaded_bytes += chunk_size - self.total_time += self.update_interval - - def download(self, link): - self.is_downloading = True - file_id = self.getIdFromUrl(link) - self.updater = SetInterval(self.update_interval, self._on_download_progress) - try: - meta = self.getFileMetadata(file_id) - path = f"{DOWNLOAD_DIR}{self.__listener.uid}/" - if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: - self.download_folder(file_id, path, meta.get('name')) - else: - os.makedirs(path) - self.download_file(file_id, path, meta.get('name'), meta.get('mimeType')) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace('>', '').replace('<', '') - if "downloadQuotaExceeded" in err: - err = "Download quota exceeded." - elif "File not found" in err: - token_service = self.alt_authorize() - if token_service is not None: - self.__service = token_service - self.updater.cancel() - return self.download(link) - self.__listener.onDownloadError(err) - self.is_cancelled = True - finally: - self.updater.cancel() - if self.is_cancelled: - return - self.__listener.onDownloadComplete() - - def download_folder(self, folder_id, path, folder_name): - folder_name = folder_name.replace('/', '') - if not os.path.exists(path + folder_name): - os.makedirs(path + folder_name) - path += folder_name + '/' - result = self.getFilesByFolderId(folder_id) - if len(result) == 0: - return - result = sorted(result, key=lambda k: k['name']) - for item in result: - file_id = item['id'] - filename = item['name'] - shortcut_details = item.get('shortcutDetails') - if shortcut_details is not None: - file_id = shortcut_details['targetId'] - mime_type = shortcut_details['targetMimeType'] - else: - mime_type = item.get('mimeType') - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.download_folder(file_id, path, filename) - elif not os.path.isfile(path + filename): - self.download_file(file_id, path, filename, mime_type) - if self.is_cancelled: - break - - @retry(wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(GCError) | retry_if_exception_type(IOError))) - def download_file(self, file_id, path, filename, mime_type): - request = self.__service.files().get_media(fileId=file_id) - filename = filename.replace('/', '') - if len(filename.encode()) > 255: - ext = os.path.splitext(filename)[1] - filename = filename[:245] + ext - if self.name.endswith(ext): - self.name = filename - fh = FileIO('{}{}'.format(path, filename), 'wb') - downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024) - done = False - while not done: - if self.is_cancelled: - fh.close() - break - try: - self.dstatus, done = downloader.next_chunk() - except HttpError as err: - if err.resp.get('content-type', 
'').startswith('application/json'): - reason = json.loads(err.content).get('error').get('errors')[0].get('reason') - if reason not in ['downloadQuotaExceeded', 'dailyLimitExceeded']: - raise err - if USE_SERVICE_ACCOUNTS: - if self.sa_count == len(os.listdir("accounts")) or self.sa_count > 50: - self.is_cancelled = True - raise err - else: - self.switchServiceAccount() - return self.download_file(file_id, path, filename, mime_type) - else: - LOGGER.error(f"Warning: {reason}") - raise err - self._file_downloaded_bytes = 0 - - def _on_download_progress(self): - if self.dstatus is not None: - chunk_size = self.dstatus.total_size * self.dstatus.progress() - self._file_downloaded_bytes - self._file_downloaded_bytes = self.dstatus.total_size * self.dstatus.progress() - self.downloaded_bytes += chunk_size - self.dtotal_time += self.update_interval - - def cancel_task(self): - self.is_cancelled = True - if self.is_downloading: - LOGGER.info(f"Cancelling download: {self.name}") - self.__listener.onDownloadError("The download task has been cancelled") - elif self.is_cloning: - LOGGER.info(f"Cancelling clone: {self.name}") - elif self.is_uploading: - LOGGER.info(f"Cancelling upload: {self.name}") - self.__listener.onUploadError("The upload task has been cancelled") + button.build_button("VIEW RESULTS 🗂️", f"https://graph.org/{self.telegraph_path[0]}") + return msg, button.build_menu(1) diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py index 6de99570..d0dfe9c1 100644 --- a/bot/helper/ext_utils/bot_utils.py +++ b/bot/helper/ext_utils/bot_utils.py @@ -13,11 +13,11 @@ URL_REGEX = r'(?:(?:https?|ftp):\/\/)?[\w/\-?=%.]+\.[\w/\-?=%.]+' class TaskStatus: - STATUS_CLONING = "Cloning...♻️" - STATUS_DOWNLOADING = "Downloading...📥" - STATUS_UPLOADING = "Uploading...📤" - STATUS_ARCHIVING = "Archiving...🔐" - STATUS_EXTRACTING = "Extracting...📂" + STATUS_CLONING = "Cloning" + STATUS_DOWNLOADING = "Downloading" + STATUS_UPLOADING = "Uploading" + STATUS_ARCHIVING = "Archiving" + STATUS_EXTRACTING = "Extracting" class SetInterval: def __init__(self, interval, action): @@ -58,46 +58,32 @@ def get_readable_message(): with download_dict_lock: msg = "" for download in list(download_dict.values()): - msg += f"Name: {escape(str(download.name()))}" - msg += f"\nStatus: {download.status()}" + msg += f"{download.status()}: {escape(str(download.name()))}" msg += f"\n{get_progress_bar_string(download)} {download.progress()}" - if download.status() == TaskStatus.STATUS_CLONING: - msg += f"\nCloned: {get_readable_file_size(download.processed_bytes())} / {download.size()}" - msg += f"\nTransfers: {download.processed_files()} / {download.files()}" - elif download.status() == TaskStatus.STATUS_DOWNLOADING: - msg += f"\nDownloaded: {get_readable_file_size(download.processed_bytes())} / {download.size()}" - elif download.status() == TaskStatus.STATUS_UPLOADING: - msg += f"\nUploaded: {get_readable_file_size(download.processed_bytes())} / {download.size()}" - elif download.status() == TaskStatus.STATUS_ARCHIVING: - msg += f"\nArchived: {get_readable_file_size(download.processed_bytes())} / {download.size()}" - elif download.status() == TaskStatus.STATUS_EXTRACTING: - msg += f"\nExtracted: {get_readable_file_size(download.processed_bytes())} / {download.size()}" + msg += f"\nProcessed: {get_readable_file_size(download.processed_bytes())} / {download.size()}" msg += f"\nSpeed: {download.speed()} | ETA: {download.eta()}" msg += f"\n/{BotCommands.CancelCommand} {download.gid()}" msg += "\n\n" if len(msg) == 
0: return None - cpu = cpu_percent(interval=0.5) - ram = virtual_memory().percent - disk = disk_usage('/').percent - uptime = get_readable_time(time.time() - botStartTime) - sysmsg = f"CPU: {cpu}% | RAM: {ram}%" - sysmsg += f"\nDISK: {disk}% | UPTIME: {uptime}" - dlspeed_bytes = 0 - upspeed_bytes = 0 + dl_speed = 0 + up_speed = 0 for download in list(download_dict.values()): - spd = download.speed() if download.status() == TaskStatus.STATUS_DOWNLOADING: + spd = download.speed() if 'KB/s' in spd: - dlspeed_bytes += float(spd.split('K')[0]) * 1024 + dl_speed += float(spd.split('K')[0]) * 1024 elif 'MB/s' in spd: - dlspeed_bytes += float(spd.split('M')[0]) * 1048576 + dl_speed += float(spd.split('M')[0]) * 1048576 elif download.status() == TaskStatus.STATUS_UPLOADING: + spd = download.speed() if 'KB/s' in spd: - upspeed_bytes += float(spd.split('K')[0]) * 1024 + up_speed += float(spd.split('K')[0]) * 1024 elif 'MB/s' in spd: - upspeed_bytes += float(spd.split('M')[0]) * 1048576 - sysmsg += f"\nDL: {get_readable_file_size(dlspeed_bytes)}/s | UL: {get_readable_file_size(upspeed_bytes)}/s" + up_speed += float(spd.split('M')[0]) * 1048576 + sysmsg = f"CPU: {cpu_percent()}% | RAM: {virtual_memory().percent}%" + sysmsg += f"\nDISK: {disk_usage('/').percent}% | UPTIME: {get_readable_time(time.time() - botStartTime)}" + sysmsg += f"\nDL: {get_readable_file_size(dl_speed)}/s | UL: {get_readable_file_size(up_speed)}/s" return msg + sysmsg def get_readable_file_size(size_in_bytes) -> str: diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py index 80e77b84..fe290af7 100644 --- a/bot/helper/ext_utils/fs_utils.py +++ b/bot/helper/ext_utils/fs_utils.py @@ -8,9 +8,9 @@ from bot.helper.ext_utils.exceptions import CompressExceptionHandler ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2", - ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm", - ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr", - ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"] + ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm", + ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs", ".lzh", ".lzma", ".mbr", + ".msi", ".mslz", ".nsis", ".ntfs", ".rpm", ".squashfs", ".udf", ".vhd", ".xar"] def clean_download(path: str): if os.path.exists(path): @@ -27,6 +27,19 @@ def start_cleanup(): pass os.makedirs(DOWNLOAD_DIR) +def clean_target(path: str): + if os.path.exists(path): + if os.path.isdir(path): + try: + shutil.rmtree(path) + except: + pass + elif os.path.isfile(path): + try: + os.remove(path) + except: + pass + def clean_all(): try: shutil.rmtree(DOWNLOAD_DIR) diff --git a/bot/helper/status_utils/clone_status.py b/bot/helper/status_utils/clone_status.py index bd5f4723..0229d10a 100644 --- a/bot/helper/status_utils/clone_status.py +++ b/bot/helper/status_utils/clone_status.py @@ -2,10 +2,9 @@ class CloneStatus: - def __init__(self, obj, size, files, message, gid): + def __init__(self, obj, size, message, gid): self.__obj = obj self.__size = size - self.__files = files self.__gid = gid self.message = message @@ -24,12 +23,6 @@ def status(self): def name(self): return self.__obj.name - def files(self): - return self.__files - - def processed_files(self): - return self.__obj.total_files - def gid(self) -> str: return self.__gid diff --git a/bot/helper/status_utils/download_status.py b/bot/helper/status_utils/download_status.py index 9f3cfdc4..296eee65 100644 --- 
a/bot/helper/status_utils/download_status.py +++ b/bot/helper/status_utils/download_status.py @@ -8,7 +8,7 @@ def __init__(self, obj, size, listener, gid): self.message = listener.message def processed_bytes(self): - return self.__obj.downloaded_bytes + return self.__obj.processed_bytes def size_raw(self): return self.__size @@ -27,7 +27,7 @@ def gid(self) -> str: def progress_raw(self): try: - return self.__obj.downloaded_bytes / self.__size * 100 + return self.__obj.processed_bytes / self.__size * 100 except: return 0 @@ -38,14 +38,14 @@ def speed_raw(self): """ :return: Download speed in Bytes/Seconds """ - return self.__obj.dspeed() + return self.__obj.speed() def speed(self): return f'{get_readable_file_size(self.speed_raw())}/s' def eta(self): try: - seconds = (self.__size - self.__obj.downloaded_bytes) / self.speed_raw() + seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw() return f'{get_readable_time(seconds)}' except: return '-' diff --git a/bot/helper/status_utils/upload_status.py b/bot/helper/status_utils/upload_status.py index 0df98e7c..71b54bab 100644 --- a/bot/helper/status_utils/upload_status.py +++ b/bot/helper/status_utils/upload_status.py @@ -8,7 +8,7 @@ def __init__(self, obj, size, gid, listener): self.message = listener.message def processed_bytes(self): - return self.__obj.uploaded_bytes + return self.__obj.processed_bytes def size_raw(self): return self.__size @@ -24,7 +24,7 @@ def name(self): def progress_raw(self): try: - return self.__obj.uploaded_bytes / self.__size * 100 + return self.__obj.processed_bytes / self.__size * 100 except ZeroDivisionError: return 0 @@ -42,7 +42,7 @@ def speed(self): def eta(self): try: - seconds = (self.__size - self.__obj.uploaded_bytes) / self.speed_raw() + seconds = (self.__size - self.__obj.processed_bytes) / self.speed_raw() return f'{get_readable_time(seconds)}' except ZeroDivisionError: return '-' diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py index 4630481a..dc9659b1 100644 --- a/bot/helper/telegram_helper/bot_commands.py +++ b/bot/helper/telegram_helper/bot_commands.py @@ -18,7 +18,6 @@ def __init__(self): self.EvalCommand = 'eval' self.ExecCommand = 'exec' self.ClearLocalsCommand = 'clearlocals' - self.ExecHelpCommand = 'exechelp' self.PingCommand = 'ping' self.StatsCommand = 'stats' self.LogCommand = 'log' diff --git a/bot/helper/telegram_helper/button_builder.py b/bot/helper/telegram_helper/button_builder.py index 323e865f..19c511be 100644 --- a/bot/helper/telegram_helper/button_builder.py +++ b/bot/helper/telegram_helper/button_builder.py @@ -1,4 +1,4 @@ -from telegram import InlineKeyboardButton +from telegram import InlineKeyboardButton, InlineKeyboardMarkup class ButtonMaker: def __init__(self): @@ -13,4 +13,4 @@ def build_menu(self, n_cols, footer_buttons=None, header_buttons=None): menu.insert(0, header_buttons) if footer_buttons: menu.append(footer_buttons) - return menu + return InlineKeyboardMarkup(menu) diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py index 2139b54c..9dafff01 100644 --- a/bot/helper/telegram_helper/message_utils.py +++ b/bot/helper/telegram_helper/message_utils.py @@ -12,7 +12,7 @@ def sendMessage(text: str, bot, message: Message): try: return bot.sendMessage(message.chat_id, reply_to_message_id=message.message_id, - text=text, parse_mode='HTMl', + text=text, parse_mode='HTML', disable_web_page_preview=True) except RetryAfter as r: LOGGER.warning(str(r)) @@ -27,7 +27,7 
@@ def sendMarkup(text: str, bot, message: Message, reply_markup: InlineKeyboardMar
         return bot.sendMessage(message.chat_id,
                                reply_to_message_id=message.message_id,
                                text=text, reply_markup=reply_markup,
-                               parse_mode='HTMl', disable_web_page_preview=True)
+                               parse_mode='HTML', disable_web_page_preview=True)
     except RetryAfter as r:
         LOGGER.warning(str(r))
         time.sleep(r.retry_after * 1.5)
@@ -40,7 +40,7 @@ def editMessage(text: str, message: Message, reply_markup=None):
     try:
         bot.editMessageText(text=text, message_id=message.message_id,
                             chat_id=message.chat.id,
-                            reply_markup=reply_markup, parse_mode='HTMl',
+                            reply_markup=reply_markup, parse_mode='HTML',
                             disable_web_page_preview=True)
     except RetryAfter as r:
         LOGGER.warning(str(r))
diff --git a/bot/modules/auth.py b/bot/modules/auth.py
index 968d5bdf..65b76495 100644
--- a/bot/modules/auth.py
+++ b/bot/modules/auth.py
@@ -7,21 +7,13 @@
 from bot.helper.ext_utils.database import DatabaseHelper
 
 def authorize(update, context):
+    user_id = ""
     reply_message = update.message.reply_to_message
     if len(context.args) == 1:
-        # Authorize an user in private
         user_id = int(context.args[0])
-        if user_id in AUTHORIZED_CHATS:
-            msg = 'Already authorized'
-        elif DATABASE_URL is not None:
-            msg = DatabaseHelper().auth_user(user_id)
-            AUTHORIZED_CHATS.add(user_id)
-        else:
-            AUTHORIZED_CHATS.add(user_id)
-            msg = 'Authorization granted'
     elif reply_message:
-        # Authorize an user by replying
         user_id = reply_message.from_user.id
+    if user_id:
         if user_id in AUTHORIZED_CHATS:
             msg = 'Already authorized'
         elif DATABASE_URL is not None:
@@ -31,7 +23,6 @@ def authorize(update, context):
             AUTHORIZED_CHATS.add(user_id)
             msg = 'Authorization granted'
     else:
-        # Authorize a chat
         chat_id = update.effective_chat.id
         if chat_id in AUTHORIZED_CHATS:
             msg = 'Already authorized'
@@ -44,21 +35,13 @@
     sendMessage(msg, context.bot, update.message)
 
 def unauthorize(update, context):
+    user_id = ""
     reply_message = update.message.reply_to_message
     if len(context.args) == 1:
-        # Unauthorize an user in private
         user_id = int(context.args[0])
-        if user_id in AUTHORIZED_CHATS:
-            if DATABASE_URL is not None:
-                msg = DatabaseHelper().unauth_user(user_id)
-            else:
-                msg = 'Authorization revoked'
-            AUTHORIZED_CHATS.remove(user_id)
-        else:
-            msg = 'Already unauthorized'
     elif reply_message:
-        # Unauthorize an user by replying
         user_id = reply_message.from_user.id
+    if user_id:
         if user_id in AUTHORIZED_CHATS:
             if DATABASE_URL is not None:
                 msg = DatabaseHelper().unauth_user(user_id)
@@ -68,7 +51,6 @@ def unauthorize(update, context):
         else:
             msg = 'Already unauthorized'
     else:
-        # Unauthorize a chat
         chat_id = update.effective_chat.id
         if chat_id in AUTHORIZED_CHATS:
             if DATABASE_URL is not None:
diff --git a/bot/modules/cancel.py b/bot/modules/cancel.py
index 9ba4eb82..f3588ed2 100644
--- a/bot/modules/cancel.py
+++ b/bot/modules/cancel.py
@@ -12,7 +12,8 @@ def cancelNode(update, context):
         gid = context.args[0]
         dl = getDownloadByGid(gid)
         if not dl:
-            return sendMessage(f"GID: {gid} not found", context.bot, update.message)
+            sendMessage(f"GID: {gid} not found", context.bot, update.message)
+            return
     elif update.message.reply_to_message:
         task_message = update.message.reply_to_message
         with download_dict_lock:
@@ -21,11 +22,14 @@
         else:
             dl = None
     if not dl:
-        return sendMessage("Not an active task", context.bot, update.message)
+        sendMessage("Not an active task", context.bot, update.message)
+        return
     elif len(context.args) == 0:
-        return sendMessage("Send a GID along with command", context.bot, update.message)
+        sendMessage("Send a GID along with command", context.bot, update.message)
+        return
     if OWNER_ID != user_id and dl.message.from_user.id != user_id:
-        return sendMessage("Not your task", context.bot, update.message)
+        sendMessage("Not your task", context.bot, update.message)
+        return
     dl.download().cancel_task()
 
 cancel_handler = CommandHandler(BotCommands.CancelCommand, cancelNode,
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
index 3e175e41..b84656db 100644
--- a/bot/modules/clone.py
+++ b/bot/modules/clone.py
@@ -72,7 +72,7 @@ def cloneNode(update, context):
     else:
         drive = GoogleDriveHelper(name)
         gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
-        clone_status = CloneStatus(drive, size, files, update.message, gid)
+        clone_status = CloneStatus(drive, size, update.message, gid)
         with download_dict_lock:
             download_dict[update.message.message_id] = clone_status
         sendStatusMessage(update.message, context.bot)
diff --git a/bot/modules/compress.py b/bot/modules/compress.py
index 7cd554f3..964470d2 100644
--- a/bot/modules/compress.py
+++ b/bot/modules/compress.py
@@ -14,7 +14,7 @@
 from bot.helper.drive_utils.gdriveTools import GoogleDriveHelper
 from bot.helper.ext_utils.bot_utils import is_gdrive_link, is_appdrive_link, is_gdtot_link
 from bot.helper.ext_utils.exceptions import CompressExceptionHandler, DDLExceptionHandler
-from bot.helper.ext_utils.fs_utils import clean_download, get_base_name, get_path_size
+from bot.helper.ext_utils.fs_utils import clean_download, clean_target, get_base_name, get_path_size
 from bot.helper.status_utils.archive_status import ArchiveStatus
 from bot.helper.status_utils.extract_status import ExtractStatus
 from bot.helper.status_utils.upload_status import UploadStatus
@@ -26,10 +26,11 @@ class CompressListener:
     def __init__(self, bot, message, is_archive=False, is_extract=False, pswd=None):
         self.bot = bot
         self.message = message
-        self.uid = self.message.message_id
+        self.uid = message.message_id
         self.is_archive = is_archive
         self.is_extract = is_extract
         self.pswd = pswd
+        self.dir = f'{DOWNLOAD_DIR}{self.uid}'
         self.suproc = None
 
     def clean(self):
@@ -45,10 +46,10 @@ def onDownloadComplete(self):
         download = download_dict[self.uid]
         name = str(download.name()).replace('/', '')
         gid = download.gid()
-        m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+        m_path = f'{self.dir}/{name}'
         size = get_path_size(m_path)
         if self.is_archive:
-            path = m_path + ".zip"
+            path = f"{m_path}.zip"
             with download_dict_lock:
                 download_dict[self.uid] = ArchiveStatus(name, size, gid, self)
             LOGGER.info(f"Archiving: {name}")
@@ -59,14 +60,7 @@
             self.suproc.wait()
             if self.suproc.returncode == -9:
                 return
-            elif self.suproc.returncode != 0:
-                LOGGER.error("Failed to archive the data")
-                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
-            if self.suproc.returncode == 0:
-                try:
-                    shutil.rmtree(m_path)
-                except:
-                    os.remove(m_path)
+            clean_target(m_path)
         elif self.is_extract:
             try:
                 if os.path.isfile(m_path):
@@ -75,89 +69,78 @@
                 with download_dict_lock:
                     download_dict[self.uid] = ExtractStatus(name, size, gid, self)
                 if os.path.isdir(m_path):
+                    path = m_path
                     for dirpath, subdir, files in os.walk(m_path, topdown=False):
                         for file_ in files:
-                            if file_.endswith((".zip", ".7z")) or re.search(r'\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$', file_) \
-                                    or (file_.endswith(".rar") and not re.search(r'\.part\d+\.rar$', file_)):
-                                m_path = os.path.join(dirpath, file_)
+                            if re.search(r'\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$|\.zip$|\.7z$|^.(?!.*\.part\d+\.rar)(?=.*\.rar$)', file_):
+                                f_path = os.path.join(dirpath, file_)
                                 if self.pswd is not None:
-                                    self.suproc = subprocess.Popen(["7z", "x", f"-p{self.pswd}", m_path, f"-o{dirpath}", "-aot"])
+                                    self.suproc = subprocess.Popen(["7z", "x", f"-p{self.pswd}", f_path, f"-o{dirpath}", "-aot"])
                                 else:
-                                    self.suproc = subprocess.Popen(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
+                                    self.suproc = subprocess.Popen(["7z", "x", f_path, f"-o{dirpath}", "-aot"])
                                 self.suproc.wait()
                                 if self.suproc.returncode == -9:
                                     return
                                 elif self.suproc.returncode != 0:
-                                    LOGGER.error("Failed to extract the archive")
-                        if self.suproc.returncode == 0:
+                                    LOGGER.error("Failed to extract the split archive")
+                        if self.suproc is not None and self.suproc.returncode == 0:
                             for file_ in files:
-                                if file_.endswith((".rar", ".zip", ".7z")) or \
-                                        re.search(r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$', file_):
+                                if re.search(r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$|\.zip$|\.rar$|\.7z$', file_):
                                     del_path = os.path.join(dirpath, file_)
-                                    os.remove(del_path)
-                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+                                    try:
+                                        os.remove(del_path)
+                                    except:
+                                        return
                 else:
                     if self.pswd is not None:
-                        self.suproc = subprocess.Popen(["bash", "pextract", m_path, self.pswd])
+                        self.suproc = subprocess.Popen(["7z", "x", f"-p{self.pswd}", m_path, f"-o{path}", "-aot"])
                     else:
-                        self.suproc = subprocess.Popen(["bash", "extract", m_path])
+                        self.suproc = subprocess.Popen(["7z", "x", m_path, f"-o{path}", "-aot"])
                     self.suproc.wait()
                     if self.suproc.returncode == -9:
                         return
                     elif self.suproc.returncode == 0:
-                        os.remove(m_path)
+                        try:
+                            os.remove(m_path)
+                        except:
+                            return
                     else:
                         LOGGER.error("Failed to extract the archive")
-                        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+                        path = m_path
             except CompressExceptionHandler as err:
                 LOGGER.info(err)
-                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+                path = m_path
         else:
-            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
-        up_name = path.rsplit('/', 1)[-1]
-        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
+            path = m_path
+        up_dir, up_name = path.rsplit('/', 1)
+        up_path = f'{up_dir}/{up_name}'
         size = get_path_size(up_path)
         LOGGER.info(f"Uploading: {up_name}")
-        drive = GoogleDriveHelper(up_name, self)
+        drive = GoogleDriveHelper(up_name, up_dir, size, self)
         upload_status = UploadStatus(drive, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = upload_status
         update_all_messages()
         drive.upload(up_name)
 
-    def onDownloadError(self, error):
-        error = error.replace('<', '').replace('>', '')
-        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
-        with download_dict_lock:
-            try:
-                del download_dict[self.uid]
-            except Exception as e:
-                LOGGER.error(str(e))
-            count = len(download_dict)
-        sendMessage(error, self.bot, self.message)
-        if count == 0:
-            self.clean()
-        else:
-            update_all_messages()
-
-    def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
+    def onUploadComplete(self, link: str, size, files, folders, typ, name):
         msg = f'<b>Name:</b> <code>{escape(name)}</code>'
         msg += f'\n<b>Size:</b> {size}'
         msg += f'\n<b>Type:</b> {typ}'
-        if os.path.isdir(f'{DOWNLOAD_DIR}{self.uid}/{name}'):
+        if typ == "Folder":
             msg += f'\n<b>SubFolders:</b> {folders}'
             msg += f'\n<b>Files:</b> {files}'
         msg += f'\n\n<b><a href="{link}">Drive Link</a></b>'
         if INDEX_URL is not None:
             url_path = requests.utils.quote(f'{name}')
             url = f'{INDEX_URL}/{url_path}'
-            if os.path.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{name}'):
+            if typ == "Folder":
                 url += '/'
                 msg += f'<b> | <a href="{url}">Index Link</a></b>'
             else:
                 msg += f'<b> | <a href="{url}">Index Link</a></b>'
         sendMessage(msg, self.bot, self.message)
-        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
+        clean_download(self.dir)
         with download_dict_lock:
             try:
                 del download_dict[self.uid]
@@ -169,9 +152,24 @@ def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
         else:
             update_all_messages()
 
+    def onDownloadError(self, error):
+        error = error.replace('<', '').replace('>', '')
+        clean_download(self.dir)
+        with download_dict_lock:
+            try:
+                del download_dict[self.uid]
+            except Exception as e:
+                LOGGER.error(str(e))
+            count = len(download_dict)
+        sendMessage(error, self.bot, self.message)
+        if count == 0:
+            self.clean()
+        else:
+            update_all_messages()
+
     def onUploadError(self, error):
         error = error.replace('<', '').replace('>', '')
-        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
+        clean_download(self.dir)
         with download_dict_lock:
             try:
                 del download_dict[self.uid]
@@ -202,7 +200,9 @@ def _compress(bot, message, is_archive=False, is_extract=False, pswd=None):
     pswd_arg = mesg[0].split(' pswd: ')
     if len(pswd_arg) > 1:
         pswd = pswd_arg[1]
-    if reply_to:
+    else:
+        pswd = None
+    if reply_to is not None:
         link = reply_to.text.split(maxsplit=1)[0].strip()
         is_appdrive = is_appdrive_link(link)
         is_gdtot = is_gdtot_link(link)
@@ -222,7 +222,7 @@
         return sendMessage(str(e), bot, message)
     listener = CompressListener(bot, message, is_archive, is_extract, pswd)
     if is_gdrive_link(link):
-        threading.Thread(target=add_gd_download, args=(link, listener, is_appdrive, appdict, is_gdtot)).start()
+        threading.Thread(target=add_gd_download, args=(link, f'{DOWNLOAD_DIR}{listener.uid}', listener, is_appdrive, appdict, is_gdtot)).start()
     else:
         help_msg = 'Instructions\nSend a link along with command'
         help_msg += '\n\nSupported Sites\n• Google Drive\n• AppDrive\n• GDToT'
diff --git a/bot/modules/eval.py b/bot/modules/eval.py
index ccdb76d3..f5d481e9 100644
--- a/bot/modules/eval.py
+++ b/bot/modules/eval.py
@@ -103,25 +103,13 @@ def clear(update, context):
     del namespaces[update.message.chat_id]
     send("Cleared locals", bot, update)
 
-def exechelp(update, context):
-    help_string = f'''
-Executor
-• /{BotCommands.EvalCommand}: Run code in Python
-• /{BotCommands.ExecCommand}: Run commands in Exec
-• /{BotCommands.ClearLocalsCommand}: Clear locals
-'''
-    sendMessage(help_string, context.bot, update.message)
-
 eval_handler = CommandHandler(BotCommands.EvalCommand, evaluate,
                               filters=CustomFilters.owner_filter, run_async=True)
 exec_handler = CommandHandler(BotCommands.ExecCommand, execute,
                               filters=CustomFilters.owner_filter, run_async=True)
 clear_handler = CommandHandler(BotCommands.ClearLocalsCommand, clear,
                                filters=CustomFilters.owner_filter, run_async=True)
-exechelp_handler = CommandHandler(BotCommands.ExecHelpCommand, exechelp,
-                                  filters=CustomFilters.owner_filter, run_async=True)
 
 dispatcher.add_handler(eval_handler)
 dispatcher.add_handler(exec_handler)
 dispatcher.add_handler(clear_handler)
-dispatcher.add_handler(exechelp_handler)
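The extract helper script below is deleted because CompressListener.onDownloadComplete now shells out to 7z directly (see the compress.py hunks above). A minimal sketch of that call pattern, assuming illustrative paths rather than the bot's real ones:

import subprocess

archive = "/usr/src/app/downloads/1234/data.zip"  # illustrative path
dest = "/usr/src/app/downloads/1234/data"         # illustrative output directory

# "x" extracts with full paths, "-o" sets the output directory (no space before
# the value), and "-aot" auto-renames on name clashes instead of overwriting.
proc = subprocess.Popen(["7z", "x", archive, f"-o{dest}", "-aot"])
proc.wait()
if proc.returncode == -9:
    pass  # process was killed, i.e. the task was cancelled
elif proc.returncode != 0:
    print("Failed to extract the archive")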
diff --git a/extract b/extract
deleted file mode 100755
index 7500a407..00000000
--- a/extract
+++ /dev/null
@@ -1,199 +0,0 @@
-#!/bin/bash
-
-if [ $# -lt 1 ]; then
-    echo "Usage: $(basename $0) FILES"
-    exit 1
-fi
-
-extract() {
-    arg="$1"
-    cd "$(dirname "$arg")" || exit
-    case "$arg" in
-        *.tar.bz2)
-            tar xjf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tar.gz)
-            tar xzf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.bz2)
-            bunzip2 "$arg"
-            local code=$?
-            ;;
-        *.gz)
-            gunzip "$arg"
-            local code=$?
-            ;;
-        *.tar)
-            tar xf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tbz2)
-            (tar xjf "$arg" --one-top-level)
-            local code=$?
-            ;;
-        *.tgz)
-            tar xzf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tar.xz)
-            a_dir=$(expr "$arg" : '\(.*\).tar.xz')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.zip)
-            a_dir=$(expr "$arg" : '\(.*\).zip')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.7z)
-            a_dir=$(expr "$arg" : '\(.*\).7z')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.Z)
-            uncompress "$arg"
-            local code=$?
-            ;;
-        *.rar)
-            a_dir=$(expr "$arg" : '\(.*\).rar')
-            mkdir "$a_dir"
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.iso)
-            a_dir=$(expr "$arg" : '\(.*\).iso')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.wim)
-            a_dir=$(expr "$arg" : '\(.*\).wim')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.cab)
-            a_dir=$(expr "$arg" : '\(.*\).cab')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.apm)
-            a_dir=$(expr "$arg" : '\(.*\).apm')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.arj)
-            a_dir=$(expr "$arg" : '\(.*\).arj')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.chm)
-            a_dir=$(expr "$arg" : '\(.*\).chm')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.cpio)
-            a_dir=$(expr "$arg" : '\(.*\).cpio')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.cramfs)
-            a_dir=$(expr "$arg" : '\(.*\).cramfs')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.deb)
-            a_dir=$(expr "$arg" : '\(.*\).deb')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.dmg)
-            a_dir=$(expr "$arg" : '\(.*\).dmg')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.fat)
-            a_dir=$(expr "$arg" : '\(.*\).fat')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.hfs)
-            a_dir=$(expr "$arg" : '\(.*\).hfs')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.lzh)
-            a_dir=$(expr "$arg" : '\(.*\).lzh')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.lzma)
-            a_dir=$(expr "$arg" : '\(.*\).lzma')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.lzma2)
-            a_dir=$(expr "$arg" : '\(.*\).lzma2')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.mbr)
-            a_dir=$(expr "$arg" : '\(.*\).mbr')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.msi)
-            a_dir=$(expr "$arg" : '\(.*\).msi')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.mslz)
-            a_dir=$(expr "$arg" : '\(.*\).mslz')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.nsis)
-            a_dir=$(expr "$arg" : '\(.*\).nsis')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.ntfs)
-            a_dir=$(expr "$arg" : '\(.*\).ntfs')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.rpm)
-            a_dir=$(expr "$arg" : '\(.*\).rpm')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.squashfs)
-            a_dir=$(expr "$arg" : '\(.*\).squashfs')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.udf)
-            a_dir=$(expr "$arg" : '\(.*\).udf')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.vhd)
-            a_dir=$(expr "$arg" : '\(.*\).vhd')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *.xar)
-            a_dir=$(expr "$arg" : '\(.*\).xar')
-            7z x "$arg" -o"$a_dir"
-            local code=$?
-            ;;
-        *)
-            echo "'$arg' cannot be extracted via extract()" 1>&2
-            exit 1
-            ;;
-    esac
-    cd - || exit $?
-    exit $code
-}
-
-extract "$1"
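The password-aware pextract helper below is dropped for the same reason: 7z accepts the password inline through its -p switch, which is what the reworked compress.py passes when a "pswd:" argument is supplied. A sketch under the same assumptions, with a made-up password:

import subprocess

archive = "/usr/src/app/downloads/1234/data.7z"  # illustrative path
dest = "/usr/src/app/downloads/1234/data"        # illustrative output directory
pswd = "secret"                                  # illustrative password

# The password is glued to -p with no separating space, as 7z expects.
proc = subprocess.Popen(["7z", "x", f"-p{pswd}", archive, f"-o{dest}", "-aot"])
proc.wait()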
diff --git a/pextract b/pextract
deleted file mode 100755
index 7c53e08a..00000000
--- a/pextract
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/bin/bash
-
-if [ $# -lt 1 ]; then
-    echo "Usage: $(basename $0) FILES"
-    exit 1
-fi
-
-extract() {
-    arg="$1"
-    pswd="$2"
-    cd "$(dirname "$arg")" || exit
-    case "$arg" in
-        *.tar.bz2)
-            tar xjf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tar.gz)
-            tar xzf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.bz2)
-            bunzip2 "$arg"
-            local code=$?
-            ;;
-        *.gz)
-            gunzip "$arg"
-            local code=$?
-            ;;
-        *.tar)
-            tar xf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tbz2)
-            (tar xjf "$arg" --one-top-level)
-            local code=$?
-            ;;
-        *.tgz)
-            tar xzf "$arg" --one-top-level
-            local code=$?
-            ;;
-        *.tar.xz)
-            a_dir=$(expr "$arg" : '\(.*\).tar.xz')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.zip)
-            a_dir=$(expr "$arg" : '\(.*\).zip')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.7z)
-            a_dir=$(expr "$arg" : '\(.*\).7z')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.Z)
-            uncompress "$arg"
-            local code=$?
-            ;;
-        *.rar)
-            a_dir=$(expr "$arg" : '\(.*\).rar')
-            mkdir "$a_dir"
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.iso)
-            a_dir=$(expr "$arg" : '\(.*\).iso')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.wim)
-            a_dir=$(expr "$arg" : '\(.*\).wim')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.cab)
-            a_dir=$(expr "$arg" : '\(.*\).cab')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.apm)
-            a_dir=$(expr "$arg" : '\(.*\).apm')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.arj)
-            a_dir=$(expr "$arg" : '\(.*\).arj')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.chm)
-            a_dir=$(expr "$arg" : '\(.*\).chm')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.cpio)
-            a_dir=$(expr "$arg" : '\(.*\).cpio')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.cramfs)
-            a_dir=$(expr "$arg" : '\(.*\).cramfs')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.deb)
-            a_dir=$(expr "$arg" : '\(.*\).deb')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.dmg)
-            a_dir=$(expr "$arg" : '\(.*\).dmg')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.fat)
-            a_dir=$(expr "$arg" : '\(.*\).fat')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.hfs)
-            a_dir=$(expr "$arg" : '\(.*\).hfs')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.lzh)
-            a_dir=$(expr "$arg" : '\(.*\).lzh')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.lzma)
-            a_dir=$(expr "$arg" : '\(.*\).lzma')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.lzma2)
-            a_dir=$(expr "$arg" : '\(.*\).lzma2')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.mbr)
-            a_dir=$(expr "$arg" : '\(.*\).mbr')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.msi)
-            a_dir=$(expr "$arg" : '\(.*\).msi')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.mslz)
-            a_dir=$(expr "$arg" : '\(.*\).mslz')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.nsis)
-            a_dir=$(expr "$arg" : '\(.*\).nsis')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.ntfs)
-            a_dir=$(expr "$arg" : '\(.*\).ntfs')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.rpm)
-            a_dir=$(expr "$arg" : '\(.*\).rpm')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.squashfs)
-            a_dir=$(expr "$arg" : '\(.*\).squashfs')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.udf)
-            a_dir=$(expr "$arg" : '\(.*\).udf')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.vhd)
-            a_dir=$(expr "$arg" : '\(.*\).vhd')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *.xar)
-            a_dir=$(expr "$arg" : '\(.*\).xar')
-            7z x "$arg" -o"$a_dir" -p"$pswd"
-            local code=$?
-            ;;
-        *)
-            echo "'$arg' cannot be extracted via extract()" 1>&2
-            exit 1
-            ;;
-    esac
-    cd - || exit $?
-    exit $code
-}
-
-extract "$1" "$2"
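For reference, the two filename patterns the reworked compress.py relies on: one finds a standalone archive or the first volume of a split archive, the other picks the leftover pieces to delete after a successful extraction. The sample names below are made up for illustration:

import re

first_volume = r'\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$|\.zip$|\.7z$|^.(?!.*\.part\d+\.rar)(?=.*\.rar$)'
leftover = r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$|\.zip$|\.rar$|\.7z$'

assert re.search(first_volume, "movie.part01.rar")      # first part of a split rar
assert re.search(first_volume, "backup.7z.001")         # first part of a split 7z
assert re.search(first_volume, "album.rar")             # plain rar archive
assert not re.search(first_volume, "movie.part02.rar")  # later parts are skipped
assert re.search(leftover, "backup.7z.002")             # split pieces get cleaned up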