From 547f6c96614bb7f274d4e6d15cfb80eb3c6f3338 Mon Sep 17 00:00:00 2001 From: Levi <57452819+l3v11@users.noreply.github.com> Date: Fri, 3 Jun 2022 22:22:53 +0600 Subject: [PATCH] Add compress module - This adds support for archiving and extracting data from Google Drive - Add password support in compress module - Add command description in clone, compress and list module - Switch base image to Ubuntu 22.04 - Fix bugs - Tidy up --- Dockerfile | 9 +- README.md | 2 +- bot/__init__.py | 16 + bot/__main__.py | 53 +-- .../ddl_generator.py} | 12 +- bot/helper/download_utils/gd_downloader.py | 39 ++ bot/helper/drive_utils/gdriveTools.py | 333 ++++++++++++++++-- bot/helper/ext_utils/bot_utils.py | 73 +++- bot/helper/ext_utils/exceptions.py | 5 +- bot/helper/ext_utils/fs_utils.py | 131 +++++++ bot/helper/status_utils/archive_status.py | 34 ++ .../clone_status.py | 8 +- bot/helper/status_utils/download_status.py | 59 ++++ bot/helper/status_utils/extract_status.py | 34 ++ bot/helper/status_utils/upload_status.py | 59 ++++ bot/helper/telegram_helper/bot_commands.py | 2 + bot/modules/auth.py | 2 +- bot/modules/cancel.py | 20 +- bot/modules/clone.py | 32 +- bot/modules/compress.py | 235 ++++++++++++ bot/modules/count.py | 2 - bot/modules/delete.py | 2 - bot/modules/list.py | 7 +- bot/modules/permission.py | 6 +- bot/modules/status.py | 9 +- config_sample.env | 2 + extract | 199 +++++++++++ pextract | 200 +++++++++++ 28 files changed, 1468 insertions(+), 117 deletions(-) rename bot/helper/{ext_utils/parser.py => download_utils/ddl_generator.py} (89%) create mode 100644 bot/helper/download_utils/gd_downloader.py create mode 100644 bot/helper/ext_utils/fs_utils.py create mode 100644 bot/helper/status_utils/archive_status.py rename bot/helper/{ext_utils => status_utils}/clone_status.py (86%) create mode 100644 bot/helper/status_utils/download_status.py create mode 100644 bot/helper/status_utils/extract_status.py create mode 100644 bot/helper/status_utils/upload_status.py create mode 
100644 bot/modules/compress.py create mode 100755 extract create mode 100755 pextract diff --git a/Dockerfile b/Dockerfile index 0d1dab46..5442b44b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,14 @@ -FROM python:3.9-slim +FROM ubuntu:22.04 WORKDIR /usr/src/app SHELL ["/bin/bash", "-c"] RUN chmod 777 /usr/src/app -RUN apt-get -qq update && \ - apt-get -qq install -y unzip +RUN apt-get -qq update && DEBIAN_FRONTEND="noninteractive" \ + apt-get -qq install -y locales python3 python3-pip \ + libmagic-dev p7zip-full unzip && locale-gen en_US.UTF-8 + +ENV LANG="en_US.UTF-8" LANGUAGE="en_US:en" COPY requirements.txt . RUN pip3 install --no-cache-dir -r requirements.txt diff --git a/README.md b/README.md index ac485552..224bb240 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ ## SearchX -> A simple Telegram Bot for searching data on Google Drive. Able to clone data from Drive / AppDrive / DriveApp / GDToT links. Supports MongoDB for storing authorized users record. +> A simple Telegram Bot for searching data on Google Drive. Able to clone data from Google Drive, AppDrive, DriveApp and GDToT links. Supports MongoDB for storing authorized users record.
diff --git a/bot/__init__.py b/bot/__init__.py index 2f5888f3..66e97f66 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -93,6 +93,14 @@ def get_config(name: str): LOGGER.error("DRIVE_FOLDER_ID env variable is missing") exit(1) +try: + DOWNLOAD_DIR = get_config('DOWNLOAD_DIR') + if not DOWNLOAD_DIR.endswith("/"): + DOWNLOAD_DIR = DOWNLOAD_DIR + '/' +except: + LOGGER.error("DOWNLOAD_DIR env variable is missing") + exit(1) + try: DATABASE_URL = get_config('DATABASE_URL') if len(DATABASE_URL) == 0: @@ -143,6 +151,14 @@ def get_config(name: str): except: CLONE_LIMIT = None +try: + COMPRESS_LIMIT = get_config('COMPRESS_LIMIT') + if len(COMPRESS_LIMIT) == 0: + raise KeyError + COMPRESS_LIMIT = float(COMPRESS_LIMIT) +except: + COMPRESS_LIMIT = None + try: TOKEN_JSON_URL = get_config('TOKEN_JSON_URL') if len(TOKEN_JSON_URL) == 0: diff --git a/bot/__main__.py b/bot/__main__.py index e359e60b..15036838 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,12 +1,14 @@ +import signal import time -from psutil import cpu_percent, cpu_count, disk_usage, virtual_memory +from psutil import cpu_percent, cpu_count, disk_usage, virtual_memory, net_io_counters from telegram import InlineKeyboardMarkup from telegram.ext import CommandHandler from bot import LOGGER, botStartTime, AUTHORIZED_CHATS, DEST_DRIVES, TELEGRAPH, dispatcher, updater -from bot.modules import auth, cancel, clone, count, delete, eval, list, permission, shell, status +from bot.modules import auth, cancel, clone, compress, count, delete, eval, list, permission, shell, status from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time +from bot.helper.ext_utils.fs_utils import start_cleanup, exit_clean_up from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_builder import ButtonMaker from bot.helper.telegram_helper.filters import CustomFilters @@ -18,10 +20,8 @@ def start(update, context): sendMessage("Access granted", context.bot, 
update.message) else: sendMessage("I'm alive :)", context.bot, update.message) - LOGGER.info('Granted: {} [{}]'.format(update.message.from_user.first_name, update.message.from_user.id)) else: sendMessage("Access denied", context.bot, update.message) - LOGGER.info('Denied: {} [{}]'.format(update.message.from_user.first_name, update.message.from_user.id)) def listkeys(update, context): keys = '' @@ -37,20 +37,25 @@ def ping(update, context): def stats(update, context): uptime = get_readable_time(time.time() - botStartTime) - total, used, free, disk= disk_usage('/') + total, used, free, disk = disk_usage('/') total = get_readable_file_size(total) used = get_readable_file_size(used) free = get_readable_file_size(free) + sent = get_readable_file_size(net_io_counters().bytes_sent) + recv = get_readable_file_size(net_io_counters().bytes_recv) cpu = cpu_percent(interval=0.5) ram = virtual_memory().percent p_core = cpu_count(logical=False) - t_core = cpu_count(logical=True) - stats = "⚙️ SYSTEM STATISTICS" \ - f"\n\nTotal Disk Space: {total}" \ - f"\nUsed: {used} | Free: {free}" \ - f"\n\nPhysical Cores: {p_core} | Total Cores: {t_core}" \ - f"\n\nCPU: {cpu}% | RAM: {ram}%" \ - f"\nDISK: {disk}% | Uptime: {uptime}" + l_core = cpu_count(logical=True) + stats = '⚙️ SYSTEM STATISTICS' \ + f'\n\nTotal Disk Space: {total}' \ + f'\nUsed: {used} | Free: {free}' \ + f'\n\nUpload: {sent}' \ + f'\nDownload: {recv}' \ + f'\n\nPhysical Cores: {p_core}' \ + f'\nLogical Cores: {l_core}' \ + f'\n\nCPU: {cpu}% | RAM: {ram}%' \ + f'\nDISK: {disk}% | Uptime: {uptime}' sendMessage(stats, context.bot, update.message) def log(update, context): @@ -67,25 +72,25 @@ def log(update, context):{link}
", listener.bot, listener.message)
+ LOGGER.info(f"Checking: {link}")
+ gd = GoogleDriveHelper()
+ res, size, name, files = gd.helper(link)
+ deleteMessage(listener.bot, msg)
+ if res != "":
+ return sendMessage(res, listener.bot, listener.message)
+ if COMPRESS_LIMIT is not None:
+ if size > COMPRESS_LIMIT * 1024**3:
+ msg2 = f"Name: {name}
"
+ msg2 += f"\nSize: {get_readable_file_size(size)}"
+ msg2 += f"\nLimit: {COMPRESS_LIMIT} GB"
+ msg2 += "\n\n⚠️ Task failed"
+ return sendMessage(msg2, listener.bot, listener.message)
+ LOGGER.info(f"Downloading: {name}")
+ drive = GoogleDriveHelper(name, listener)
+ gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=12))
+ download_status = DownloadStatus(drive, size, listener, gid)
+ with download_dict_lock:
+ download_dict[listener.uid] = download_status
+ sendStatusMessage(listener.message, listener.bot)
+ drive.download(link)
+ if is_appdrive:
+ if appdict.get('link_type') == 'login':
+ LOGGER.info(f"Deleting: {link}")
+ drive.deleteFile(link)
+ elif is_gdtot:
+ LOGGER.info(f"Deleting: {link}")
+ drive.deleteFile(link)
diff --git a/bot/helper/drive_utils/gdriveTools.py b/bot/helper/drive_utils/gdriveTools.py
index 76e61be9..1b3647fe 100644
--- a/bot/helper/drive_utils/gdriveTools.py
+++ b/bot/helper/drive_utils/gdriveTools.py
@@ -5,8 +5,8 @@
import requests
import time
-import urllib.parse as urlparse
-from urllib.parse import parse_qs
+from io import FileIO
+from urllib.parse import parse_qs, urlparse
from random import randrange
from tenacity import retry, wait_exponential, stop_after_attempt, \
retry_if_exception_type, before_log, RetryError
@@ -19,10 +19,12 @@
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
+from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
-from bot import LOGGER, DRIVE_NAMES, DRIVE_IDS, INDEX_URLS, PARENT_ID, \
+from bot import LOGGER, DOWNLOAD_DIR, DRIVE_NAMES, DRIVE_IDS, INDEX_URLS, PARENT_ID, \
IS_TEAM_DRIVE, TELEGRAPH, USE_SERVICE_ACCOUNTS, INDEX_URL, DEST_DRIVES
-from bot.helper.ext_utils.bot_utils import *
+from bot.helper.ext_utils.bot_utils import SetInterval, get_readable_file_size
+from bot.helper.ext_utils.fs_utils import get_mime_type, get_path_size
from bot.helper.telegram_helper.button_builder import ButtonMaker
logging.getLogger('googleapiclient.discovery').setLevel(logging.ERROR)
@@ -33,30 +35,69 @@
TELEGRAPH_LIMIT = 60
class GoogleDriveHelper:
- def __init__(self, name=None):
+ def __init__(self, name=None, listener=None):
# Check https://developers.google.com/drive/scopes for all available scopes
self.__OAUTH_SCOPE = ['https://www.googleapis.com/auth/drive']
self.__G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder"
self.__G_DRIVE_BASE_DOWNLOAD_URL = "https://drive.google.com/uc?id={}&export=download"
self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL = "https://drive.google.com/drive/folders/{}"
+ self.__listener = listener
self.__service = self.authorize()
- self.title = "SearchX"
- self.author_name = "Levi"
- self.author_url = "https://t.me/l3v11"
- self.telegraph_content = []
- self.path = []
- self.response = {}
+ self._file_uploaded_bytes = 0
+ self._file_downloaded_bytes = 0
+ self.uploaded_bytes = 0
+ self.downloaded_bytes = 0
self.start_time = 0
+ self.total_time = 0
+ self.dtotal_time = 0
+ self.is_uploading = False
+ self.is_downloading = False
+ self.is_cloning = False
self.is_cancelled = False
+ self.is_errored = False
+ self.status = None
+ self.dstatus = None
+ self.updater = None
self.name = name
+ self.update_interval = 3
self.total_bytes = 0
self.total_files = 0
self.total_folders = 0
self.transferred_size = 0
self.sa_count = 0
self.alt_auth = False
+ self.response = {}
+ self.path = []
+ self.telegraph_content = []
+ self.title = "SearchX"
+ self.author_name = "Levi"
+ self.author_url = "https://t.me/l3v11"
+
+ def speed(self):
+ """
+ It calculates the average upload speed and returns it in bytes/seconds unit
+ :return: Upload speed in bytes/second
+ """
+ try:
+ return self.uploaded_bytes / self.total_time
+ except ZeroDivisionError:
+ return 0
+
+ def dspeed(self):
+ """
+ It calculates the average download speed and returns it in bytes/seconds unit
+ :return: Download speed in bytes/second
+ """
+ try:
+ return self.downloaded_bytes / self.dtotal_time
+ except ZeroDivisionError:
+ return 0
def cspeed(self):
+ """
+ It calculates the average clone speed and returns it in bytes/seconds unit
+ :return: Clone speed in bytes/second
+ """
try:
return self.transferred_size / int(time.time() - self.start_time)
except ZeroDivisionError:
@@ -99,7 +140,7 @@ def getIdFromUrl(link: str):
if res is None:
raise IndexError("Drive ID not found")
return res.group(3)
- parsed = urlparse.urlparse(link)
+ parsed = urlparse(link)
return parse_qs(parsed.query)['id'][0]
def deleteFile(self, link: str):
@@ -114,7 +155,7 @@ def deleteFile(self, link: str):
res = self.__service.files().delete(
fileId=file_id,
supportsAllDrives=IS_TEAM_DRIVE).execute()
- msg += "Permanently deleted"
+ msg = "Permanently deleted"
except HttpError as err:
if "File not found" in str(err):
msg = "File not found"
@@ -172,12 +213,13 @@ def setPerm(self, link, access):
try:
if access != '':
self.__set_permission_email(file_id, access)
- msg += f"Added {access}
as viewer"
+ msg = f"Added {access}
as viewer"
else:
self.__set_permission_public(file_id)
- msg += "Set permission to Anyone with the link
"
+ msg = "Set permission to Anyone with the link
"
except HttpError as err:
err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
if "File not found" in str(err):
msg = "File not found"
elif "insufficientFilePermissions" in str(err):
@@ -188,7 +230,6 @@ def setPerm(self, link, access):
msg = "Insufficient file permissions"
else:
msg = str(err)
- LOGGER.error(msg)
return msg
@retry(wait=wait_exponential(multiplier=2, min=3, max=6),
@@ -258,6 +299,7 @@ def getFilesByFolderId(self, folder_id):
return files
def clone(self, link, key):
+ self.is_cloning = True
self.start_time = time.time()
self.total_files = 0
self.total_folders = 0
@@ -285,9 +327,9 @@ def clone(self, link, key):
self.cloneFolder(meta.get('name'), meta.get('name'), meta.get('id'), dir_id)
durl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id)
if self.is_cancelled:
- LOGGER.info(f"Deleting: {name}")
+ LOGGER.info("Deleting cloned data from Drive")
self.deleteFile(durl)
- return "The task has been cancelled"
+ return "The clone task has been cancelled"
msg += f'Name: {name}
'
msg += f'\nSize: {get_readable_file_size(self.transferred_size)}'
msg += f'\nType: Folder'
@@ -315,6 +357,7 @@ def clone(self, link, key):
LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
if "User rate limit exceeded" in str(err):
msg = "User rate limit exceeded"
elif "File not found" in str(err):
@@ -325,7 +368,6 @@ def clone(self, link, key):
msg = "File not found"
else:
msg = str(err)
- LOGGER.error(msg)
return msg
def cloneFolder(self, name, local_path, folder_id, parent_id):
@@ -395,6 +437,7 @@ def count(self, link):
LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
if "File not found" in str(err):
token_service = self.alt_authorize()
if token_service is not None:
@@ -403,7 +446,6 @@ def count(self, link):
msg = "File not found"
else:
msg = str(err)
- LOGGER.error(msg)
return msg
def gDrive_file(self, filee):
@@ -439,7 +481,6 @@ def helper(self, link):
try:
meta = self.getFileMetadata(file_id)
name = meta.get('name')
- LOGGER.info(f"Checking: {name}")
if meta.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
self.gDrive_directory(meta)
else:
@@ -452,6 +493,7 @@ def helper(self, link):
LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
if "File not found" in str(err):
token_service = self.alt_authorize()
if token_service is not None:
@@ -460,7 +502,6 @@ def helper(self, link):
msg = "File not found"
else:
msg = str(err)
- LOGGER.error(msg)
return msg, "", "", ""
return "", size, name, files
@@ -622,6 +663,252 @@ def edit_page(self, acc, content, path):
time.sleep(e.retry_after)
self.edit_page(acc, content, path)
+ def upload(self, file_name: str):
+ self.is_downloading = False
+ self.is_uploading = True
+ file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}"
+ file_path = f"{file_dir}/{file_name}"
+ size = get_readable_file_size(get_path_size(file_path))
+ self.updater = SetInterval(self.update_interval, self._on_upload_progress)
+ try:
+ if os.path.isfile(file_path):
+ mime_type = get_mime_type(file_path)
+ link = self.upload_file(file_path, file_name, mime_type, PARENT_ID)
+ if self.is_cancelled:
+ return
+ if link is None:
+ raise Exception("The upload task has been manually cancelled")
+ else:
+ mime_type = 'Folder'
+ dir_id = self.create_directory(os.path.basename(os.path.abspath(file_name)), PARENT_ID)
+ result = self.upload_dir(file_path, dir_id)
+ if result is None:
+ raise Exception("The upload task has been manually cancelled")
+ link = f"https://drive.google.com/folderview?id={dir_id}"
+ if self.is_cancelled:
+ return
+ except Exception as e:
+ if isinstance(e, RetryError):
+ LOGGER.info(f"Total attempts: {e.last_attempt.attempt_number}")
+ err = e.last_attempt.exception()
+ else:
+ err = e
+ LOGGER.error(err)
+ self.__listener.onUploadError(str(err))
+ self.is_errored = True
+ finally:
+ self.updater.cancel()
+ if self.is_cancelled and not self.is_errored:
+ if mime_type == 'Folder':
+ LOGGER.info("Deleting uploaded data from Drive")
+ link = f"https://drive.google.com/folderview?id={dir_id}"
+ self.deleteFile(link)
+ return
+ elif self.is_errored:
+ return
+ self.__listener.onUploadComplete(link, size, self.total_files, self.total_folders, mime_type, self.name)
+
+ def upload_dir(self, input_directory, parent_id):
+ list_dirs = os.listdir(input_directory)
+ if len(list_dirs) == 0:
+ return parent_id
+ new_id = None
+ for item in list_dirs:
+ current_file_name = os.path.join(input_directory, item)
+ if os.path.isdir(current_file_name):
+ current_dir_id = self.create_directory(item, parent_id)
+ new_id = self.upload_dir(current_file_name, current_dir_id)
+ self.total_folders += 1
+ else:
+ mime_type = get_mime_type(current_file_name)
+ file_name = current_file_name.split("/")[-1]
+ # 'current_file_name' will have the full path
+ self.upload_file(current_file_name, file_name, mime_type, parent_id)
+ self.total_files += 1
+ new_id = parent_id
+ if self.is_cancelled:
+ break
+ return new_id
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6),
+ stop=stop_after_attempt(3),
+ retry=(retry_if_exception_type(HttpError) | retry_if_exception_type(IOError)),
+ before=before_log(LOGGER, logging.DEBUG))
+ def upload_file(self, file_path, file_name, mime_type, parent_id):
+ file_metadata = {
+ 'name': file_name,
+ 'mimeType': mime_type
+ }
+ if parent_id is not None:
+ file_metadata['parents'] = [parent_id]
+ if os.path.getsize(file_path) == 0:
+ media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=False)
+ response = self.__service.files().create(
+ supportsAllDrives=True,
+ body=file_metadata,
+ media_body=media_body).execute()
+ if not IS_TEAM_DRIVE:
+ self.__set_permission_public(response['id'])
+ drive_file = self.__service.files().get(
+ supportsAllDrives=True,
+ fileId=response['id']).execute()
+ download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id'))
+ return download_url
+ media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=True,
+ chunksize=50 * 1024 * 1024)
+ drive_file = self.__service.files().create(
+ supportsAllDrives=True,
+ body=file_metadata,
+ media_body=media_body)
+ response = None
+ while response is None:
+ if self.is_cancelled:
+ break
+ try:
+ self.status, response = drive_file.next_chunk()
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = json.loads(err.content).get('error').get('errors')[0].get('reason')
+ if reason not in ['userRateLimitExceeded', 'dailyLimitExceeded']:
+ raise err
+ if USE_SERVICE_ACCOUNTS:
+ self.switchServiceAccount()
+ return self.upload_file(file_path, file_name, mime_type, parent_id)
+ else:
+ LOGGER.error(f"Warning: {reason}")
+ raise err
+ if self.is_cancelled:
+ return
+ self._file_uploaded_bytes = 0
+ if not IS_TEAM_DRIVE:
+ self.__set_permission_public(response['id'])
+ drive_file = self.__service.files().get(
+ supportsAllDrives=True,
+ fileId=response['id']).execute()
+ download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id'))
+ return download_url
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6),
+ stop=stop_after_attempt(3),
+ retry=retry_if_exception_type(HttpError),
+ before=before_log(LOGGER, logging.DEBUG))
+ def _on_upload_progress(self):
+ if self.status is not None:
+ chunk_size = self.status.total_size * self.status.progress() - self._file_uploaded_bytes
+ self._file_uploaded_bytes = self.status.total_size * self.status.progress()
+ self.uploaded_bytes += chunk_size
+ self.total_time += self.update_interval
+
+ def download(self, link):
+ self.is_downloading = True
+ file_id = self.getIdFromUrl(link)
+ self.updater = SetInterval(self.update_interval, self._on_download_progress)
+ try:
+ meta = self.getFileMetadata(file_id)
+ path = f"{DOWNLOAD_DIR}{self.__listener.uid}/"
+ if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.download_folder(file_id, path, meta.get('name'))
+ else:
+ os.makedirs(path)
+ self.download_file(file_id, path, meta.get('name'), meta.get('mimeType'))
+ except Exception as err:
+ if isinstance(err, RetryError):
+ LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}")
+ err = err.last_attempt.exception()
+ err = str(err).replace('>', '').replace('<', '')
+ LOGGER.error(err)
+ if "downloadQuotaExceeded" in str(err):
+ err = "Download quota exceeded."
+ elif "File not found" in str(err):
+ token_service = self.alt_authorize()
+ if token_service is not None:
+ self.__service = token_service
+ self.updater.cancel()
+ return self.download(link)
+ self.__listener.onDownloadError(err)
+ self.is_cancelled = True
+ finally:
+ self.updater.cancel()
+ if self.is_cancelled:
+ return
+ self.__listener.onDownloadComplete()
+
+ def download_folder(self, folder_id, path, folder_name):
+ folder_name = folder_name.replace('/', '')
+ if not os.path.exists(path + folder_name):
+ os.makedirs(path + folder_name)
+ path += folder_name + '/'
+ result = self.getFilesByFolderId(folder_id)
+ if len(result) == 0:
+ return
+ result = sorted(result, key=lambda k: k['name'])
+ for item in result:
+ file_id = item['id']
+ filename = item['name']
+ shortcut_details = item.get('shortcutDetails')
+ if shortcut_details is not None:
+ file_id = shortcut_details['targetId']
+ mime_type = shortcut_details['targetMimeType']
+ else:
+ mime_type = item.get('mimeType')
+ if mime_type == self.__G_DRIVE_DIR_MIME_TYPE:
+ self.download_folder(file_id, path, filename)
+ elif not os.path.isfile(path + filename):
+ self.download_file(file_id, path, filename, mime_type)
+ if self.is_cancelled:
+ break
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6),
+ stop=stop_after_attempt(3),
+ retry=(retry_if_exception_type(HttpError) | retry_if_exception_type(IOError)),
+ before=before_log(LOGGER, logging.DEBUG))
+ def download_file(self, file_id, path, filename, mime_type):
+ request = self.__service.files().get_media(fileId=file_id)
+ filename = filename.replace('/', '')
+ fh = FileIO('{}{}'.format(path, filename), 'wb')
+ downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024)
+ done = False
+ while not done:
+ if self.is_cancelled:
+ fh.close()
+ break
+ try:
+ self.dstatus, done = downloader.next_chunk()
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = json.loads(err.content).get('error').get('errors')[0].get('reason')
+ if reason not in ['downloadQuotaExceeded', 'dailyLimitExceeded']:
+ raise err
+ if USE_SERVICE_ACCOUNTS:
+ if self.sa_count == len(os.listdir("accounts")) or self.sa_count > 50:
+ self.is_cancelled = True
+ raise err
+ else:
+ self.switchServiceAccount()
+ return self.download_file(file_id, path, filename, mime_type)
+ else:
+ LOGGER.error(f"Warning: {reason}")
+ raise err
+ self._file_downloaded_bytes = 0
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6),
+ stop=stop_after_attempt(3),
+ retry=retry_if_exception_type(HttpError),
+ before=before_log(LOGGER, logging.DEBUG))
+ def _on_download_progress(self):
+ if self.dstatus is not None:
+ chunk_size = self.dstatus.total_size * self.dstatus.progress() - self._file_downloaded_bytes
+ self._file_downloaded_bytes = self.dstatus.total_size * self.dstatus.progress()
+ self.downloaded_bytes += chunk_size
+ self.dtotal_time += self.update_interval
+
def cancel_task(self):
self.is_cancelled = True
- LOGGER.info(f"Cancelling: {self.name}")
+ if self.is_downloading:
+ LOGGER.info(f"Cancelling download: {self.name}")
+ self.__listener.onDownloadError("The download task has been cancelled")
+ elif self.is_cloning:
+ LOGGER.info(f"Cancelling clone: {self.name}")
+ elif self.is_uploading:
+ LOGGER.info(f"Cancelling upload: {self.name}")
+ self.__listener.onUploadError("The upload task has been cancelled")
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index 13aa0e10..4e6a342f 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -3,12 +3,21 @@
import time
from html import escape
-from psutil import virtual_memory, cpu_percent
+from psutil import virtual_memory, cpu_percent, disk_usage
from bot.helper.telegram_helper.bot_commands import BotCommands
-from bot import botStartTime, download_dict, download_dict_lock
+from bot import botStartTime, DOWNLOAD_DIR, download_dict, download_dict_lock
-SIZE_UNITS = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB']
+SIZE_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
+
+URL_REGEX = r'(?:(?:https?|ftp):\/\/)?[\w/\-?=%.]+\.[\w/\-?=%.]+'
+
+class TaskStatus:
+ STATUS_UPLOADING = "Uploading...📤"
+ STATUS_DOWNLOADING = "Downloading...📥"
+ STATUS_CLONING = "Cloning...♻️"
+ STATUS_ARCHIVING = "Archiving...🔐"
+ STATUS_EXTRACTING = "Extracting...📂"
class SetInterval:
def __init__(self, interval, action):
@@ -30,8 +39,11 @@ def cancel(self):
def getDownloadByGid(gid):
with download_dict_lock:
for dl in list(download_dict.values()):
- if dl.gid() == gid:
- return dl
+ status = dl.status()
+ if status not in [TaskStatus.STATUS_ARCHIVING,
+ TaskStatus.STATUS_EXTRACTING]:
+ if dl.gid() == gid:
+ return dl
return None
def get_progress_bar_string(status):
@@ -42,7 +54,7 @@ def get_progress_bar_string(status):
cFull = p // 8
p_str = '⬤' * cFull
p_str += '○' * (12 - cFull)
- p_str = f"[{p_str}]"
+ p_str = f"「{p_str}」"
return p_str
def get_readable_message():
@@ -50,16 +62,43 @@ def get_readable_message():
msg = ""
for download in list(download_dict.values()):
msg += f"Name: {escape(str(download.name()))}
"
- msg += f"\n{get_progress_bar_string(download)} {download.progress()}"
- msg += f"\nSize: {get_readable_file_size(download.processed_bytes())} / {download.size()}"
- msg += f"\nTransfers: {download.processed_files()} / {download.files()}"
- msg += f"\nSpeed: {download.speed()}"
- msg += f"\nETA: {download.eta()}"
- msg += f"\n/{BotCommands.CancelCommand} {download.gid()}
"
+ msg += f"\nStatus: {download.status()}"
+ if download.status() not in [TaskStatus.STATUS_ARCHIVING,
+ TaskStatus.STATUS_EXTRACTING]:
+ msg += f"\n{get_progress_bar_string(download)} {download.progress()}"
+ if download.status() == TaskStatus.STATUS_CLONING:
+ msg += f"\nCloned: {get_readable_file_size(download.processed_bytes())} / {download.size()}"
+ msg += f"\nTransfers: {download.processed_files()} / {download.files()}"
+ elif download.status() == TaskStatus.STATUS_UPLOADING:
+ msg += f"\nUploaded: {get_readable_file_size(download.processed_bytes())} / {download.size()}"
+ else:
+ msg += f"\nDownloaded: {get_readable_file_size(download.processed_bytes())} / {download.size()}"
+ msg += f"\nSpeed: {download.speed()} | ETA: {download.eta()}"
+ msg += f"\n/{BotCommands.CancelCommand} {download.gid()}
"
+ else:
+ msg += f"\nSize: {download.size()}"
msg += "\n\n"
- sysmsg = "━━━━━━━━━━━━━━━"
- sysmsg += f"\nCPU: {cpu_percent()}% | RAM: {virtual_memory().percent}%"
- sysmsg += f"\nUPTIME: {get_readable_time(time.time() - botStartTime)}"
+ cpu = cpu_percent(interval=0.5)
+ ram = virtual_memory().percent
+ disk = disk_usage('/').percent
+ uptime = get_readable_time(time.time() - botStartTime)
+ sysmsg = f"CPU: {cpu}% | RAM: {ram}%"
+ sysmsg += f"\nDISK: {disk}% | UPTIME: {uptime}"
+ dlspeed_bytes = 0
+ upspeed_bytes = 0
+ for download in list(download_dict.values()):
+ spd = download.speed()
+ if download.status() == TaskStatus.STATUS_DOWNLOADING:
+ if 'KB/s' in spd:
+ dlspeed_bytes += float(spd.split('K')[0]) * 1024
+ elif 'MB/s' in spd:
+ dlspeed_bytes += float(spd.split('M')[0]) * 1048576
+ elif download.status() == TaskStatus.STATUS_UPLOADING:
+ if 'KB/s' in spd:
+ upspeed_bytes += float(spd.split('K')[0]) * 1024
+ elif 'MB/s' in spd:
+ upspeed_bytes += float(spd.split('M')[0]) * 1048576
+ sysmsg += f"\nDL: {get_readable_file_size(dlspeed_bytes)}/s | UL: {get_readable_file_size(upspeed_bytes)}/s"
return msg + sysmsg
def get_readable_file_size(size_in_bytes) -> str:
@@ -92,6 +131,10 @@ def get_readable_time(seconds: int) -> str:
result += f'{seconds}s'
return result
+def is_url(url: str):
+ url = re.findall(URL_REGEX, url)
+ return bool(url)
+
def is_gdrive_link(url: str):
return "drive.google.com" in url
diff --git a/bot/helper/ext_utils/exceptions.py b/bot/helper/ext_utils/exceptions.py
index 26a16ac4..30972967 100644
--- a/bot/helper/ext_utils/exceptions.py
+++ b/bot/helper/ext_utils/exceptions.py
@@ -1,2 +1,5 @@
-class ExceptionHandler(Exception):
+class CompressExceptionHandler(Exception):
+ pass
+
+class DDLExceptionHandler(Exception):
pass
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
new file mode 100644
index 00000000..d5afe062
--- /dev/null
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -0,0 +1,131 @@
+import magic
+import os
+import shutil
+import sys
+
+from bot import LOGGER, DOWNLOAD_DIR
+from bot.helper.ext_utils.exceptions import CompressExceptionHandler
+
+def clean_download(path: str):
+ if os.path.exists(path):
+ LOGGER.info(f"Cleaning: {path}")
+ try:
+ shutil.rmtree(path)
+ except:
+ pass
+
+def start_cleanup():
+ try:
+ shutil.rmtree(DOWNLOAD_DIR)
+ except:
+ pass
+ os.makedirs(DOWNLOAD_DIR)
+
+def clean_all():
+ try:
+ shutil.rmtree(DOWNLOAD_DIR)
+ except:
+ pass
+
+def exit_clean_up(signal, frame):
+ try:
+ LOGGER.info("Cleaning up the downloads and exiting")
+ clean_all()
+ sys.exit(0)
+ except KeyboardInterrupt:
+ LOGGER.warning("Force exiting before the cleanup finishes")
+ sys.exit(1)
+
+def get_path_size(path: str):
+ if os.path.isfile(path):
+ return os.path.getsize(path)
+ total_size = 0
+ for root, dirs, files in os.walk(path):
+ for f in files:
+ abs_path = os.path.join(root, f)
+ total_size += os.path.getsize(abs_path)
+ return total_size
+
+def get_base_name(orig_path: str):
+ if orig_path.endswith(".tar.bz2"):
+ return orig_path.rsplit(".tar.bz2", 1)[0]
+ elif orig_path.endswith(".tar.gz"):
+ return orig_path.rsplit(".tar.gz", 1)[0]
+ elif orig_path.endswith(".bz2"):
+ return orig_path.rsplit(".bz2", 1)[0]
+ elif orig_path.endswith(".gz"):
+ return orig_path.rsplit(".gz", 1)[0]
+ elif orig_path.endswith(".tar.xz"):
+ return orig_path.rsplit(".tar.xz", 1)[0]
+ elif orig_path.endswith(".tar"):
+ return orig_path.rsplit(".tar", 1)[0]
+ elif orig_path.endswith(".tbz2"):
+ return orig_path.rsplit(".tbz2", 1)[0]
+ elif orig_path.endswith(".tgz"):
+ return orig_path.rsplit(".tgz", 1)[0]
+ elif orig_path.endswith(".zip"):
+ return orig_path.rsplit(".zip", 1)[0]
+ elif orig_path.endswith(".7z"):
+ return orig_path.rsplit(".7z", 1)[0]
+ elif orig_path.endswith(".Z"):
+ return orig_path.rsplit(".Z", 1)[0]
+ elif orig_path.endswith(".rar"):
+ return orig_path.rsplit(".rar", 1)[0]
+ elif orig_path.endswith(".iso"):
+ return orig_path.rsplit(".iso", 1)[0]
+ elif orig_path.endswith(".wim"):
+ return orig_path.rsplit(".wim", 1)[0]
+ elif orig_path.endswith(".cab"):
+ return orig_path.rsplit(".cab", 1)[0]
+ elif orig_path.endswith(".apm"):
+ return orig_path.rsplit(".apm", 1)[0]
+ elif orig_path.endswith(".arj"):
+ return orig_path.rsplit(".arj", 1)[0]
+ elif orig_path.endswith(".chm"):
+ return orig_path.rsplit(".chm", 1)[0]
+ elif orig_path.endswith(".cpio"):
+ return orig_path.rsplit(".cpio", 1)[0]
+ elif orig_path.endswith(".cramfs"):
+ return orig_path.rsplit(".cramfs", 1)[0]
+ elif orig_path.endswith(".deb"):
+ return orig_path.rsplit(".deb", 1)[0]
+ elif orig_path.endswith(".dmg"):
+ return orig_path.rsplit(".dmg", 1)[0]
+ elif orig_path.endswith(".fat"):
+ return orig_path.rsplit(".fat", 1)[0]
+ elif orig_path.endswith(".hfs"):
+ return orig_path.rsplit(".hfs", 1)[0]
+ elif orig_path.endswith(".lzh"):
+ return orig_path.rsplit(".lzh", 1)[0]
+ elif orig_path.endswith(".lzma"):
+ return orig_path.rsplit(".lzma", 1)[0]
+ elif orig_path.endswith(".lzma2"):
+ return orig_path.rsplit(".lzma2", 1)[0]
+ elif orig_path.endswith(".mbr"):
+ return orig_path.rsplit(".mbr", 1)[0]
+ elif orig_path.endswith(".msi"):
+ return orig_path.rsplit(".msi", 1)[0]
+ elif orig_path.endswith(".mslz"):
+ return orig_path.rsplit(".mslz", 1)[0]
+ elif orig_path.endswith(".nsis"):
+ return orig_path.rsplit(".nsis", 1)[0]
+ elif orig_path.endswith(".ntfs"):
+ return orig_path.rsplit(".ntfs", 1)[0]
+ elif orig_path.endswith(".rpm"):
+ return orig_path.rsplit(".rpm", 1)[0]
+ elif orig_path.endswith(".squashfs"):
+ return orig_path.rsplit(".squashfs", 1)[0]
+ elif orig_path.endswith(".udf"):
+ return orig_path.rsplit(".udf", 1)[0]
+ elif orig_path.endswith(".vhd"):
+ return orig_path.rsplit(".vhd", 1)[0]
+ elif orig_path.endswith(".xar"):
+ return orig_path.rsplit(".xar", 1)[0]
+ else:
+ raise CompressExceptionHandler('Unsupported file format')
+
+def get_mime_type(file_path):
+ mime = magic.Magic(mime=True)
+ mime_type = mime.from_file(file_path)
+ mime_type = mime_type or "text/plain"
+ return mime_type
diff --git a/bot/helper/status_utils/archive_status.py b/bot/helper/status_utils/archive_status.py
new file mode 100644
index 00000000..e29c02a2
--- /dev/null
+++ b/bot/helper/status_utils/archive_status.py
@@ -0,0 +1,34 @@
+from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size
+
+class ArchiveStatus:
+    # Placeholder status object shown by the status updater while a 7z
+    # archive job runs. It mirrors the interface shared by the other
+    # *Status classes (progress/speed/name/path/size/eta/status/processed_bytes).
+    def __init__(self, name, path, size):
+        self.__name = name    # display name of the item being archived
+        self.__path = path    # filesystem path of the source data
+        self.__size = size    # total size in bytes
+
+    # The progress of the archive function cannot be tracked. So we just return
+    # dummy values. If this is possible in the future, we should implement it.
+
+    def progress(self):
+        return '0'
+
+    def speed(self):
+        return '0'
+
+    def name(self):
+        return self.__name
+
+    def path(self):
+        return self.__path
+
+    def size(self):
+        # Human-readable total size of the data being archived.
+        return get_readable_file_size(self.__size)
+
+    def eta(self):
+        return '0s'
+
+    def status(self):
+        return TaskStatus.STATUS_ARCHIVING
+
+    def processed_bytes(self):
+        return 0
diff --git a/bot/helper/ext_utils/clone_status.py b/bot/helper/status_utils/clone_status.py
similarity index 86%
rename from bot/helper/ext_utils/clone_status.py
rename to bot/helper/status_utils/clone_status.py
index 8e911bd8..bf4f0be4 100644
--- a/bot/helper/ext_utils/clone_status.py
+++ b/bot/helper/status_utils/clone_status.py
@@ -1,4 +1,5 @@
-from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
+from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size, get_readable_time
+
class CloneStatus:
def __init__(self, obj, size, files, message, gid):
@@ -17,6 +18,9 @@ def size_raw(self):
def size(self):
return get_readable_file_size(self.__size)
+ def status(self):
+ return TaskStatus.STATUS_CLONING
+
def name(self):
return self.__obj.name
@@ -40,7 +44,7 @@ def progress(self):
def speed_raw(self):
"""
- :return: Download speed in Bytes/Seconds
+ :return: Clone speed in Bytes/Seconds
"""
return self.__obj.cspeed()
diff --git a/bot/helper/status_utils/download_status.py b/bot/helper/status_utils/download_status.py
new file mode 100644
index 00000000..8201470b
--- /dev/null
+++ b/bot/helper/status_utils/download_status.py
@@ -0,0 +1,59 @@
+from bot import DOWNLOAD_DIR
+from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size, get_readable_time
+
+class DownloadStatus:
+    # Live status object for a Google Drive download task, consumed by the
+    # periodic status-message updater. `obj` is the downloader helper that
+    # exposes `downloaded_bytes`, `name`, `dspeed()` and `cancel_task()`.
+    def __init__(self, obj, size, listener, gid):
+        self.__obj = obj
+        self.__size = size            # total size in bytes
+        self.__uid = listener.uid     # message_id, doubles as the download dir name
+        self.message = listener.message
+        self.__gid = gid
+
+    def path(self):
+        # Local working directory for this task.
+        return f"{DOWNLOAD_DIR}{self.__uid}"
+
+    def processed_bytes(self):
+        return self.__obj.downloaded_bytes
+
+    def size_raw(self):
+        return self.__size
+
+    def size(self):
+        return get_readable_file_size(self.__size)
+
+    def status(self):
+        return TaskStatus.STATUS_DOWNLOADING
+
+    def name(self):
+        return self.__obj.name
+
+    def gid(self) -> str:
+        return self.__gid
+
+    def progress_raw(self):
+        # Percentage complete; 0 when the total size is unknown/zero.
+        try:
+            return self.__obj.downloaded_bytes / self.__size * 100
+        except ZeroDivisionError:
+            return 0
+
+    def progress(self):
+        return f'{round(self.progress_raw(), 2)}%'
+
+    def speed_raw(self):
+        """
+        :return: Download speed in Bytes/Seconds
+        """
+        return self.__obj.dspeed()
+
+    def speed(self):
+        return f'{get_readable_file_size(self.speed_raw())}/s'
+
+    def eta(self):
+        # '-' when the current speed is zero (division by zero).
+        try:
+            seconds = (self.__size - self.__obj.downloaded_bytes) / self.speed_raw()
+            return f'{get_readable_time(seconds)}'
+        except ZeroDivisionError:
+            return '-'
+
+    def download(self):
+        # The underlying helper; used by /cancel via `dl.download().cancel_task()`.
+        return self.__obj
diff --git a/bot/helper/status_utils/extract_status.py b/bot/helper/status_utils/extract_status.py
new file mode 100644
index 00000000..d47beeb4
--- /dev/null
+++ b/bot/helper/status_utils/extract_status.py
@@ -0,0 +1,34 @@
+from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size
+
+class ExtractStatus:
+    # Placeholder status object shown by the status updater while an
+    # extraction job runs. Mirrors the shared *Status interface.
+    def __init__(self, name, path, size):
+        self.__name = name    # display name of the archive being extracted
+        self.__path = path    # filesystem path of the archive
+        self.__size = size    # total size in bytes
+
+    # The progress of the extract function cannot be tracked. So we just return
+    # dummy values. If this is possible in the future, we should implement it.
+
+    def progress(self):
+        return '0'
+
+    def speed(self):
+        return '0'
+
+    def name(self):
+        return self.__name
+
+    def path(self):
+        return self.__path
+
+    def size(self):
+        # Human-readable total size of the archive being extracted.
+        return get_readable_file_size(self.__size)
+
+    def eta(self):
+        return '0s'
+
+    def status(self):
+        return TaskStatus.STATUS_EXTRACTING
+
+    def processed_bytes(self):
+        return 0
diff --git a/bot/helper/status_utils/upload_status.py b/bot/helper/status_utils/upload_status.py
new file mode 100644
index 00000000..d9eb3b47
--- /dev/null
+++ b/bot/helper/status_utils/upload_status.py
@@ -0,0 +1,59 @@
+from bot import DOWNLOAD_DIR
+from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size, get_readable_time
+
+class UploadStatus:
+    # Live status object for a Google Drive upload task. `obj` is the
+    # GoogleDriveHelper instance, which exposes `uploaded_bytes`, `name`,
+    # `speed()` and `cancel_task()`.
+    def __init__(self, obj, size, gid, listener):
+        self.__obj = obj
+        self.__size = size            # total size in bytes
+        self.__uid = listener.uid     # message_id, doubles as the download dir name
+        self.__gid = gid
+        self.message = listener.message
+
+    def path(self):
+        # Local working directory for this task.
+        return f"{DOWNLOAD_DIR}{self.__uid}"
+
+    def processed_bytes(self):
+        return self.__obj.uploaded_bytes
+
+    def size_raw(self):
+        return self.__size
+
+    def size(self):
+        return get_readable_file_size(self.__size)
+
+    def status(self):
+        return TaskStatus.STATUS_UPLOADING
+
+    def name(self):
+        return self.__obj.name
+
+    def progress_raw(self):
+        # Percentage complete; 0 when the total size is unknown/zero.
+        try:
+            return self.__obj.uploaded_bytes / self.__size * 100
+        except ZeroDivisionError:
+            return 0
+
+    def progress(self):
+        return f'{round(self.progress_raw(), 2)}%'
+
+    def speed_raw(self):
+        """
+        :return: Upload speed in Bytes/Seconds
+        """
+        return self.__obj.speed()
+
+    def speed(self):
+        return f'{get_readable_file_size(self.speed_raw())}/s'
+
+    def eta(self):
+        # '-' when the current speed is zero (division by zero).
+        try:
+            seconds = (self.__size - self.__obj.uploaded_bytes) / self.speed_raw()
+            return f'{get_readable_time(seconds)}'
+        except ZeroDivisionError:
+            return '-'
+
+    def gid(self) -> str:
+        return self.__gid
+
+    def download(self):
+        # The underlying helper; used by /cancel via `dl.download().cancel_task()`.
+        return self.__obj
diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py
index b4914471..5bc37866 100644
--- a/bot/helper/telegram_helper/bot_commands.py
+++ b/bot/helper/telegram_helper/bot_commands.py
@@ -3,6 +3,8 @@ def __init__(self):
self.StartCommand = 'start'
self.ListCommand = 'find'
self.CloneCommand = 'clone'
+ self.ArchiveCommand = 'archive'
+ self.ExtractCommand = 'extract'
self.CountCommand = 'count'
self.CancelCommand = 'cancel'
self.StatusCommand = 'status'
diff --git a/bot/modules/auth.py b/bot/modules/auth.py
index c06c7246..baadf99a 100644
--- a/bot/modules/auth.py
+++ b/bot/modules/auth.py
@@ -1,6 +1,6 @@
from telegram.ext import CommandHandler
-from bot import AUTHORIZED_CHATS, DATABASE_URL, dispatcher
+from bot import dispatcher, AUTHORIZED_CHATS, DATABASE_URL
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.message_utils import sendMessage
diff --git a/bot/modules/cancel.py b/bot/modules/cancel.py
index 86faa6fd..79d7dd9c 100644
--- a/bot/modules/cancel.py
+++ b/bot/modules/cancel.py
@@ -1,39 +1,37 @@
from telegram.ext import CommandHandler
-from bot import LOGGER, dispatcher, OWNER_ID, download_dict, download_dict_lock
+from bot import dispatcher, OWNER_ID, download_dict, download_dict_lock
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.message_utils import sendMessage
-from bot.helper.ext_utils.bot_utils import getDownloadByGid
+from bot.helper.ext_utils.bot_utils import TaskStatus, getDownloadByGid
def cancelNode(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=1)
user_id = update.message.from_user.id
if len(args) > 1:
gid = args[1]
dl = getDownloadByGid(gid)
if not dl:
- LOGGER.info("Cancelling: None")
            return sendMessage(f"GID: {gid} not found", context.bot, update.message)
elif update.message.reply_to_message:
task_message = update.message.reply_to_message
with download_dict_lock:
keys = list(download_dict.keys())
- try:
+ if task_message.message_id in keys:
dl = download_dict[task_message.message_id]
- except:
+ else:
dl = None
if not dl:
- LOGGER.info("Cancelling: None")
return sendMessage("Not an active task", context.bot, update.message)
elif len(args) == 1:
- msg = "Send a GID along with command"
- LOGGER.info("Cancelling: None")
- return sendMessage(msg, context.bot, update.message)
+ return sendMessage("Send a GID along with command", context.bot, update.message)
if OWNER_ID != user_id and dl.message.from_user.id != user_id:
- LOGGER.info("Cancelling: None")
return sendMessage("Not your task", context.bot, update.message)
+ if dl.status() == TaskStatus.STATUS_ARCHIVING:
+ sendMessage("Archival in progress, the task cannot be cancelled", context.bot, update.message)
+ elif dl.status() == TaskStatus.STATUS_EXTRACTING:
+ sendMessage("Extraction in progress, the task cannot be cancelled", context.bot, update.message)
else:
dl.download().cancel_task()
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
index 6c9e6c8c..1f09bf3a 100644
--- a/bot/modules/clone.py
+++ b/bot/modules/clone.py
@@ -4,12 +4,12 @@
from telegram.ext import CommandHandler
from bot import LOGGER, dispatcher, CLONE_LIMIT, download_dict, download_dict_lock, Interval
+from bot.helper.download_utils.ddl_generator import appdrive, gdtot
from bot.helper.drive_utils.gdriveTools import GoogleDriveHelper
from bot.helper.ext_utils.bot_utils import new_thread, get_readable_file_size, is_gdrive_link, \
is_appdrive_link, is_gdtot_link
-from bot.helper.ext_utils.clone_status import CloneStatus
-from bot.helper.ext_utils.exceptions import ExceptionHandler
-from bot.helper.ext_utils.parser import appdrive, gdtot
+from bot.helper.ext_utils.exceptions import DDLExceptionHandler
+from bot.helper.status_utils.clone_status import CloneStatus
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage, deleteMessage, \
delete_all_messages, update_all_messages, sendStatusMessage
from bot.helper.telegram_helper.bot_commands import BotCommands
@@ -17,7 +17,6 @@
@new_thread
def cloneNode(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=2)
reply_to = update.message.reply_to_message
link = ''
@@ -30,11 +29,13 @@ def cloneNode(update, context):
pass
if reply_to is not None:
link = reply_to.text
- if len(args) > 1:
+ try:
key = args[1]
+ except IndexError:
+ pass
is_appdrive = is_appdrive_link(link)
is_gdtot = is_gdtot_link(link)
- if (is_appdrive or is_gdtot):
+ if any([is_appdrive, is_gdtot]):
        msg = sendMessage(f"Processing: {link}", context.bot, update.message)
LOGGER.info(f"Processing: {link}")
try:
@@ -44,12 +45,13 @@ def cloneNode(update, context):
if is_gdtot:
link = gdtot(link)
deleteMessage(context.bot, msg)
- except ExceptionHandler as e:
+ except DDLExceptionHandler as e:
deleteMessage(context.bot, msg)
LOGGER.error(e)
return sendMessage(str(e), context.bot, update.message)
if is_gdrive_link(link):
        msg = sendMessage(f"Checking: {link}", context.bot, update.message)
+ LOGGER.info(f"Checking: {link}")
gd = GoogleDriveHelper()
res, size, name, files = gd.helper(link)
deleteMessage(context.bot, msg)
@@ -59,7 +61,7 @@ def cloneNode(update, context):
if size > CLONE_LIMIT * 1024**3:
            msg2 = f"Name: {name}"
msg2 += f"\nSize: {get_readable_file_size(size)}"
- msg2 += f"\nLimit: {CLONE_LIMIT} GiB"
+ msg2 += f"\nLimit: {CLONE_LIMIT} GB"
msg2 += "\n\n⚠️ Task failed"
return sendMessage(msg2, context.bot, update.message)
if files <= 20:
@@ -89,16 +91,18 @@ def cloneNode(update, context):
except IndexError:
pass
sendMessage(result, context.bot, update.message)
- if is_gdtot:
- LOGGER.info(f"Deleting: {link}")
- gd.deleteFile(link)
- elif is_appdrive:
+ if is_appdrive:
if appdict.get('link_type') == 'login':
LOGGER.info(f"Deleting: {link}")
gd.deleteFile(link)
+ elif is_gdtot:
+ LOGGER.info(f"Deleting: {link}")
+ gd.deleteFile(link)
else:
- sendMessage("Send a Drive / AppDrive / DriveApp / GDToT link along with command", context.bot, update.message)
- LOGGER.info("Cloning: None")
+ help_msg = 'Instructions\nSend a link along with command'
+ help_msg += '\n\nSupported Sites\n• Google Drive\n• AppDrive\n• DriveApp\n• GDToT'
+ help_msg += '\n\nSet Destination Drive\nAdd <key> after the link'
+ sendMessage(help_msg, context.bot, update.message)
clone_handler = CommandHandler(BotCommands.CloneCommand, cloneNode,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
diff --git a/bot/modules/compress.py b/bot/modules/compress.py
new file mode 100644
index 00000000..e5be1e90
--- /dev/null
+++ b/bot/modules/compress.py
@@ -0,0 +1,235 @@
+import os
+import re
+import requests
+import shutil
+import subprocess
+import threading
+
+from html import escape
+from pathlib import PurePath
+from telegram.ext import CommandHandler
+
+from bot import LOGGER, dispatcher, DOWNLOAD_DIR, Interval, INDEX_URL, download_dict, download_dict_lock
+from bot.helper.download_utils.ddl_generator import appdrive, gdtot
+from bot.helper.download_utils.gd_downloader import add_gd_download
+from bot.helper.drive_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.ext_utils.bot_utils import is_gdrive_link, is_appdrive_link, is_gdtot_link
+from bot.helper.ext_utils.exceptions import CompressExceptionHandler, DDLExceptionHandler
+from bot.helper.ext_utils.fs_utils import clean_download, get_base_name, get_path_size
+from bot.helper.status_utils.archive_status import ArchiveStatus
+from bot.helper.status_utils.extract_status import ExtractStatus
+from bot.helper.status_utils.upload_status import UploadStatus
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, delete_all_messages, update_all_messages
+
+class CompressListener:
+    # Listener driving the archive/extract pipeline: it is notified when the
+    # Google Drive download finishes, then archives or extracts the data and
+    # re-uploads the result, updating `download_dict` with the proper status
+    # object at each stage.
+    def __init__(self, bot, message, is_archive=False, is_extract=False, pswd=None):
+        self.bot = bot
+        self.message = message
+        self.uid = self.message.message_id    # also names the working directory
+        self.is_archive = is_archive
+        self.is_extract = is_extract
+        self.pswd = pswd                      # optional archive password
+
+    def clean(self):
+        # Stop the periodic status updater and remove all status messages
+        # once no tasks remain.
+        try:
+            Interval[0].cancel()
+            del Interval[0]
+            delete_all_messages()
+        except IndexError:
+            pass
+
+    def onDownloadComplete(self):
+        with download_dict_lock:
+            download = download_dict[self.uid]
+            name = str(download.name()).replace('/', '')
+            gid = download.gid()
+            size = download.size_raw()
+        m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+        if self.is_archive:
+            # Archive branch: pack the download into a store-only (-mx=0) zip.
+            try:
+                with download_dict_lock:
+                    download_dict[self.uid] = ArchiveStatus(name, m_path, size)
+                path = m_path + ".zip"
+                LOGGER.info(f"Archiving: {name}")
+                if self.pswd is not None:
+                    subprocess.run(["7z", "a", "-mx=0", f"-p{self.pswd}", path, m_path])
+                else:
+                    subprocess.run(["7z", "a", "-mx=0", path, m_path])
+            except FileNotFoundError:
+                LOGGER.info("File to archive not found")
+                self.onUploadError('Internal error')
+                return
+            # Remove the unarchived source; rmtree for dirs, os.remove for files.
+            # NOTE(review): bare except is overly broad here — NotADirectoryError
+            # would be the precise fallback trigger.
+            try:
+                shutil.rmtree(m_path)
+            except:
+                os.remove(m_path)
+        elif self.is_extract:
+            # Extract branch: handles a single archive file or a directory of
+            # (possibly split) archives.
+            try:
+                if os.path.isfile(m_path):
+                    path = get_base_name(m_path)
+                LOGGER.info(f"Extracting: {name}")
+                with download_dict_lock:
+                    download_dict[self.uid] = ExtractStatus(name, m_path, size)
+                if os.path.isdir(m_path):
+                    # Walk bottom-up and extract every archive's first volume
+                    # in place, then delete the archive parts that were consumed.
+                    for dirpath, subdir, files in os.walk(m_path, topdown=False):
+                        for file_ in files:
+                            if file_.endswith(".zip") or re.search(r'\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$', file_) \
+                                    or (file_.endswith(".rar") and not re.search(r'\.part\d+\.rar$', file_)):
+                                m_path = os.path.join(dirpath, file_)
+                                if self.pswd is not None:
+                                    result = subprocess.run(["7z", "x", f"-p{self.pswd}", m_path, f"-o{dirpath}", "-aot"])
+                                else:
+                                    result = subprocess.run(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
+                                if result.returncode != 0:
+                                    LOGGER.error("Failed to extract the archive")
+                        for file_ in files:
+                            if file_.endswith((".rar", ".zip")) or re.search(r'\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$', file_):
+                                del_path = os.path.join(dirpath, file_)
+                                os.remove(del_path)
+                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+                else:
+                    # Single file: delegate to the extract/pextract shell helpers.
+                    if self.pswd is not None:
+                        result = subprocess.run(["bash", "pextract", m_path, self.pswd])
+                    else:
+                        result = subprocess.run(["bash", "extract", m_path])
+                    if result.returncode == 0:
+                        os.remove(m_path)
+                    else:
+                        LOGGER.error("Failed to extract the archive")
+                        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+            except CompressExceptionHandler:
+                # get_base_name raised: not a recognized archive — upload as-is.
+                LOGGER.info("Not a valid archive")
+                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+        else:
+            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
+        # Upload stage: swap in an UploadStatus and hand off to Drive helper.
+        up_name = PurePath(path).name
+        up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
+        size = get_path_size(up_path)
+        LOGGER.info(f"Uploading: {up_name}")
+        drive = GoogleDriveHelper(up_name, self)
+        upload_status = UploadStatus(drive, size, gid, self)
+        with download_dict_lock:
+            download_dict[self.uid] = upload_status
+        update_all_messages()
+        drive.upload(up_name)
+
+    def onDownloadError(self, error):
+        # Report the failure, clean the working directory and drop the task.
+        error = error.replace('<', '').replace('>', '')
+        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
+        with download_dict_lock:
+            try:
+                del download_dict[self.uid]
+            except Exception as e:
+                LOGGER.error(str(e))
+            count = len(download_dict)
+        sendMessage(error, self.bot, self.message)
+        if count == 0:
+            self.clean()
+        else:
+            update_all_messages()
+
+    def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
+        # Build the final result message (name/size/type, folder stats, links),
+        # then clean up the local working directory and the task entry.
+        msg = f'Name: {escape(name)}'
+        msg += f'\nSize: {size}'
+        msg += f'\nType: {typ}'
+        if os.path.isdir(f'{DOWNLOAD_DIR}{self.uid}/{name}'):
+            msg += f'\nSubFolders: {folders}'
+            msg += f'\nFiles: {files}'
+        msg += f'\n\nDrive Link'
+        if INDEX_URL is not None:
+            url_path = requests.utils.quote(f'{name}')
+            url = f'{INDEX_URL}/{url_path}'
+            # NOTE(review): this isdir check uses '{DOWNLOAD_DIR}/{self.uid}'
+            # (extra slash) while the one above uses '{DOWNLOAD_DIR}{self.uid}'
+            # — confirm DOWNLOAD_DIR's trailing-slash convention; one of the
+            # two paths is likely wrong.
+            if os.path.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{name}'):
+                url += '/'
+                msg += f' | Index Link'
+            else:
+                msg += f' | Index Link'
+        sendMessage(msg, self.bot, self.message)
+        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
+        with download_dict_lock:
+            try:
+                del download_dict[self.uid]
+            except Exception as e:
+                LOGGER.error(str(e))
+            count = len(download_dict)
+        if count == 0:
+            self.clean()
+        else:
+            update_all_messages()
+
+    def onUploadError(self, error):
+        # Same cleanup path as onDownloadError, triggered by upload failures.
+        error = error.replace('<', '').replace('>', '')
+        clean_download(f'{DOWNLOAD_DIR}{self.uid}')
+        with download_dict_lock:
+            try:
+                del download_dict[self.uid]
+            except Exception as e:
+                LOGGER.error(str(e))
+            count = len(download_dict)
+        sendMessage(error, self.bot, self.message)
+        if count == 0:
+            self.clean()
+        else:
+            update_all_messages()
+
+def _compress(bot, message, is_archive=False, is_extract=False, pswd=None):
+    # Shared entry point for /archive and /extract: parse the link and the
+    # optional "pswd: xxx" suffix from the command (or a replied message),
+    # resolve AppDrive/GDToT links to a Drive link, then start the download
+    # in a background thread with a CompressListener attached.
+    mesg = message.text.split('\n')
+    message_args = mesg[0].split(" ", maxsplit=1)
+    reply_to = message.reply_to_message
+    is_appdrive = False
+    is_gdtot = False
+    appdict = ''
+    # NOTE(review): bare except — IndexError is the only exception expected here.
+    try:
+        link = message_args[1]
+        if link.startswith("pswd: "):
+            # The argument is only a password, not a link.
+            raise IndexError
+    except:
+        link = ''
+    # Strip any trailing "pswd:" section or " |" suffix from the link text.
+    link = re.split(r"pswd:| \|", link)[0]
+    link = link.strip()
+    pswdMsg = mesg[0].split(' pswd: ')
+    if len(pswdMsg) > 1:
+        pswd = pswdMsg[1]
+    if reply_to is not None:
+        # A replied-to message overrides a link typed after the command.
+        reply_text = reply_to.text
+        link = reply_text.strip()
+    is_appdrive = is_appdrive_link(link)
+    is_gdtot = is_gdtot_link(link)
+    if any([is_appdrive, is_gdtot]):
+        msg = sendMessage(f"Processing: {link}", bot, message)
+        LOGGER.info(f"Processing: {link}")
+        try:
+            if is_appdrive:
+                appdict = appdrive(link)
+                link = appdict.get('gdrive_link')
+            if is_gdtot:
+                link = gdtot(link)
+            deleteMessage(bot, msg)
+        except DDLExceptionHandler as e:
+            deleteMessage(bot, msg)
+            LOGGER.error(e)
+            return sendMessage(str(e), bot, message)
+    listener = CompressListener(bot, message, is_archive, is_extract, pswd)
+    if is_gdrive_link(link):
+        threading.Thread(target=add_gd_download, args=(link, listener, is_appdrive, appdict, is_gdtot)).start()
+    else:
+        help_msg = 'Instructions\nSend a link along with command'
+        help_msg += '\n\nSupported Sites\n• Google Drive\n• AppDrive\n• DriveApp\n• GDToT'
+        help_msg += '\n\nSet Password\nAdd "pswd: xxx" after the link'
+        sendMessage(help_msg, bot, message)
+
+
+def archive_data(update, context):
+    # /archive handler: download the linked data, 7z it, and re-upload.
+    _compress(context.bot, update.message, is_archive=True)
+
+def extract_data(update, context):
+    # /extract handler: download the linked archive, extract it, and re-upload.
+    _compress(context.bot, update.message, is_extract=True)
+
+# Register /archive and /extract, restricted to authorized chats and users.
+archive_handler = CommandHandler(BotCommands.ArchiveCommand, archive_data,
+                                 filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+extract_handler = CommandHandler(BotCommands.ExtractCommand, extract_data,
+                                 filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
+dispatcher.add_handler(archive_handler)
+dispatcher.add_handler(extract_handler)
diff --git a/bot/modules/count.py b/bot/modules/count.py
index 0bb363ce..b4660bd2 100644
--- a/bot/modules/count.py
+++ b/bot/modules/count.py
@@ -9,7 +9,6 @@
@new_thread
def countNode(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=1)
reply_to = update.message.reply_to_message
link = ''
@@ -26,7 +25,6 @@ def countNode(update, context):
sendMessage(result, context.bot, update.message)
else:
sendMessage("Send a Drive link along with command", context.bot, update.message)
- LOGGER.info("Counting: None")
count_handler = CommandHandler(BotCommands.CountCommand, countNode,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
diff --git a/bot/modules/delete.py b/bot/modules/delete.py
index 473eccb2..3cb6af3c 100644
--- a/bot/modules/delete.py
+++ b/bot/modules/delete.py
@@ -9,7 +9,6 @@
@new_thread
def deleteNode(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=1)
reply_to = update.message.reply_to_message
link = ''
@@ -26,7 +25,6 @@ def deleteNode(update, context):
sendMessage(result, context.bot, update.message)
else:
sendMessage("Send a Drive link along with command", context.bot, update.message)
- LOGGER.info("Deleting: None")
delete_handler = CommandHandler(BotCommands.DeleteCommand, deleteNode,
filters=CustomFilters.owner_filter, run_async=True)
diff --git a/bot/modules/list.py b/bot/modules/list.py
index bfe3d5d5..957e3d78 100644
--- a/bot/modules/list.py
+++ b/bot/modules/list.py
@@ -7,7 +7,6 @@
from bot.helper.telegram_helper.message_utils import sendMessage, editMessage
def list_drive(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=1)
reply_to = update.message.reply_to_message
query = ''
@@ -26,8 +25,10 @@ def list_drive(update, context):
LOGGER.exception(e)
editMessage(msg, reply, button)
else:
- sendMessage('Send a Query along with command', context.bot, update.message)
- LOGGER.info("Finding: None")
+ help_msg = 'Instructions\nSend a Query along with command'
+        help_msg += '\n\nGet Folder Results\nAdd "-d" before the Query'
+        help_msg += '\n\nGet File Results\nAdd "-f" before the Query'
+ sendMessage(help_msg, context.bot, update.message)
list_handler = CommandHandler(BotCommands.ListCommand, list_drive,
filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
diff --git a/bot/modules/permission.py b/bot/modules/permission.py
index ebc4020c..03aa2582 100644
--- a/bot/modules/permission.py
+++ b/bot/modules/permission.py
@@ -9,7 +9,6 @@
@new_thread
def permissionNode(update, context):
- LOGGER.info(f"User: {update.message.from_user.first_name} [{update.message.from_user.id}]")
args = update.message.text.split(" ", maxsplit=2)
reply_to = update.message.reply_to_message
link = ''
@@ -22,8 +21,10 @@ def permissionNode(update, context):
pass
if reply_to is not None:
link = reply_to.text
- if len(args) > 1:
+ try:
access = args[1]
+ except IndexError:
+ pass
if is_gdrive_link(link):
        msg = sendMessage(f"Setting permission: {link}", context.bot, update.message)
LOGGER.info(f"Setting permission: {link}")
@@ -33,7 +34,6 @@ def permissionNode(update, context):
sendMessage(result, context.bot, update.message)
else:
sendMessage("Send a Drive link along with command", context.bot, update.message)
- LOGGER.info("Setting permission: None")
permission_handler = CommandHandler(BotCommands.PermissionCommand, permissionNode,
filters=CustomFilters.owner_filter, run_async=True)
diff --git a/bot/modules/status.py b/bot/modules/status.py
index 0f52e98e..b8a1f0dd 100644
--- a/bot/modules/status.py
+++ b/bot/modules/status.py
@@ -1,10 +1,8 @@
import time
-from psutil import cpu_percent, virtual_memory
from telegram.ext import CommandHandler
-from bot import dispatcher, botStartTime, download_dict, download_dict_lock, status_reply_dict, status_reply_dict_lock
-from bot.helper.ext_utils.bot_utils import get_readable_time
+from bot import dispatcher, download_dict, download_dict_lock, status_reply_dict, status_reply_dict_lock
from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, sendStatusMessage
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
@@ -12,10 +10,7 @@
def statusNode(update, context):
with download_dict_lock:
if len(download_dict) == 0:
- message = "No active task\n━━━━━━━━━━━━━━━"
- message += f"\nCPU: {cpu_percent()}% | RAM: {virtual_memory().percent}%"
- message += f"\nUPTIME: {get_readable_time(time.time() - botStartTime)}"
- return sendMessage(message, context.bot, update.message)
+ return sendMessage("No active task", context.bot, update.message)
index = update.effective_chat.id
with status_reply_dict_lock:
if index in status_reply_dict.keys():
diff --git a/config_sample.env b/config_sample.env
index ffbd8a23..b44c1876 100644
--- a/config_sample.env
+++ b/config_sample.env
@@ -2,6 +2,7 @@
BOT_TOKEN=
OWNER_ID=
DRIVE_FOLDER_ID=
+DOWNLOAD_DIR=/usr/src/app/downloads
# OPTIONAL CONFIG
AUTHORIZED_CHATS=
DATABASE_URL=
@@ -11,6 +12,7 @@ STATUS_UPDATE_INTERVAL=
TELEGRAPH_ACCS=
INDEX_URL=
CLONE_LIMIT=
+COMPRESS_LIMIT=
TOKEN_JSON_URL=
ACCOUNTS_ZIP_URL=
DRIVE_LIST_URL=
diff --git a/extract b/extract
new file mode 100755
index 00000000..7500a407
--- /dev/null
+++ b/extract
@@ -0,0 +1,199 @@
+#!/bin/bash
+# Extract a single archive in place, choosing the tool by file extension.
+# Called by the bot as: bash extract <archive-path>
+# Exits with the extraction tool's status code; 1 for unsupported formats.
+
+if [ $# -lt 1 ]; then
+    echo "Usage: $(basename $0) FILES"
+    exit 1
+fi
+
+extract() {
+    arg="$1"
+    # Work from the archive's directory so output lands next to it.
+    cd "$(dirname "$arg")" || exit
+    case "$arg" in
+        *.tar.bz2)
+            tar xjf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tar.gz)
+            tar xzf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.bz2)
+            bunzip2 "$arg"
+            local code=$?
+            ;;
+        *.gz)
+            gunzip "$arg"
+            local code=$?
+            ;;
+        *.tar)
+            tar xf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tbz2)
+            (tar xjf "$arg" --one-top-level)
+            local code=$?
+            ;;
+        *.tgz)
+            tar xzf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tar.xz)
+            # For 7z-handled formats, strip the suffix to get the output dir.
+            a_dir=$(expr "$arg" : '\(.*\).tar.xz')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.zip)
+            a_dir=$(expr "$arg" : '\(.*\).zip')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.7z)
+            a_dir=$(expr "$arg" : '\(.*\).7z')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.Z)
+            uncompress "$arg"
+            local code=$?
+            ;;
+        *.rar)
+            a_dir=$(expr "$arg" : '\(.*\).rar')
+            mkdir "$a_dir"
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.iso)
+            a_dir=$(expr "$arg" : '\(.*\).iso')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.wim)
+            a_dir=$(expr "$arg" : '\(.*\).wim')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.cab)
+            a_dir=$(expr "$arg" : '\(.*\).cab')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.apm)
+            a_dir=$(expr "$arg" : '\(.*\).apm')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.arj)
+            a_dir=$(expr "$arg" : '\(.*\).arj')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.chm)
+            a_dir=$(expr "$arg" : '\(.*\).chm')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.cpio)
+            a_dir=$(expr "$arg" : '\(.*\).cpio')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.cramfs)
+            a_dir=$(expr "$arg" : '\(.*\).cramfs')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.deb)
+            a_dir=$(expr "$arg" : '\(.*\).deb')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.dmg)
+            a_dir=$(expr "$arg" : '\(.*\).dmg')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.fat)
+            a_dir=$(expr "$arg" : '\(.*\).fat')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.hfs)
+            a_dir=$(expr "$arg" : '\(.*\).hfs')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.lzh)
+            a_dir=$(expr "$arg" : '\(.*\).lzh')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.lzma)
+            a_dir=$(expr "$arg" : '\(.*\).lzma')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.lzma2)
+            a_dir=$(expr "$arg" : '\(.*\).lzma2')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.mbr)
+            a_dir=$(expr "$arg" : '\(.*\).mbr')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.msi)
+            a_dir=$(expr "$arg" : '\(.*\).msi')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.mslz)
+            a_dir=$(expr "$arg" : '\(.*\).mslz')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.nsis)
+            a_dir=$(expr "$arg" : '\(.*\).nsis')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.ntfs)
+            a_dir=$(expr "$arg" : '\(.*\).ntfs')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.rpm)
+            a_dir=$(expr "$arg" : '\(.*\).rpm')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.squashfs)
+            a_dir=$(expr "$arg" : '\(.*\).squashfs')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.udf)
+            a_dir=$(expr "$arg" : '\(.*\).udf')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.vhd)
+            a_dir=$(expr "$arg" : '\(.*\).vhd')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *.xar)
+            a_dir=$(expr "$arg" : '\(.*\).xar')
+            7z x "$arg" -o"$a_dir"
+            local code=$?
+            ;;
+        *)
+            echo "'$arg' cannot be extracted via extract()" 1>&2
+            exit 1
+            ;;
+    esac
+    cd - || exit $?
+    # NOTE(review): 'exit' here ends the whole script from inside the
+    # function — intended, since extract() is the script's final action.
+    exit $code
+}
+
+extract "$1"
diff --git a/pextract b/pextract
new file mode 100755
index 00000000..7c53e08a
--- /dev/null
+++ b/pextract
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# Usage: pextract FILE [PASSWORD]
+# Extracts FILE into a directory next to it; unknown formats abort.
+if [ $# -lt 1 ]; then
+    echo "Usage: $(basename $0) FILES"
+    exit 1
+fi
+
+extract() {
+    arg="$1"
+    pswd="$2"
+    # Work inside the archive's directory so output lands beside it.
+    cd "$(dirname "$arg")" || exit
+    case "$arg" in
+        *.tar.bz2)
+            tar xjf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tar.gz)
+            tar xzf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.bz2)
+            bunzip2 "$arg"
+            local code=$?
+            ;;
+        *.gz)
+            gunzip "$arg"
+            local code=$?
+            ;;
+        *.tar)
+            tar xf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tbz2)
+            tar xjf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.tgz)
+            tar xzf "$arg" --one-top-level
+            local code=$?
+            ;;
+        *.Z)
+            uncompress "$arg"
+            local code=$?
+            ;;
+        # Every remaining format is delegated to 7z with identical handling.
+        *.tar.xz|*.zip|*.7z|*.rar|*.iso|*.wim|*.cab|*.apm|*.arj|*.chm|*.cpio|*.cramfs|*.deb|*.dmg|*.fat|*.hfs|*.lzh|*.lzma|*.lzma2|*.mbr|*.msi|*.mslz|*.nsis|*.ntfs|*.rpm|*.squashfs|*.udf|*.vhd|*.xar)
+            # Strip the archive extension to name the output directory.
+            case "$arg" in
+                *.tar.xz) a_dir="${arg%.tar.xz}" ;;
+                *) a_dir="${arg%.*}" ;;
+            esac
+            # A bare -p (empty password) makes 7z prompt on stdin and hang a
+            # non-interactive caller, so pass -p only when a password was given.
+            if [ -n "$pswd" ]; then
+                7z x "$arg" -o"$a_dir" -p"$pswd"
+            else
+                7z x "$arg" -o"$a_dir"
+            fi
+            local code=$?
+            ;;
+        *)
+            echo "'$arg' cannot be extracted via extract()" 1>&2
+            exit 1
+            ;;
+    esac
+    cd - || exit $?
+    exit $code
+}
+
+extract "$1" "$2"