Fix bot getting stuck while cloning folders
- This fixes an issue where the bot was getting stuck when authorizing with the token.json file while cloning folders containing 20+ files
- Rename compress module to archive module
- Change the data compression command of the archive module from /archive to /compress
- Tidy up
l3v11 authored Sep 13, 2022
1 parent 378521f commit d28ae9c
Showing 17 changed files with 86 additions and 86 deletions.
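Among the changes below, the token-based retry in clone() (bot/helper/drive_utils/gdriveTools.py) now recurses with dest_id instead of the stale name key. A minimal, self-contained sketch of that bug class; only the clone/link/dest_id names are taken from the diff, while FakeDrive and its strings are invented for illustration:

class FakeDrive:
    def __init__(self):
        self.retried = False

    def clone(self, link, dest_id):
        if not self.retried:
            self.retried = True
            # Before the fix this line retried with `key`, a name that is not
            # in clone()'s signature, so the retry could not reuse the
            # requested destination. The fix passes dest_id straight through.
            return self.clone(link, dest_id)
        return f"cloned {link} into {dest_id}"

print(FakeDrive().clone("https://drive.google.com/...", "root"))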
7 changes: 4 additions & 3 deletions README.md
@@ -18,12 +18,13 @@ SearchX is a multipurpose Telegram bot written in Python for Google Drive

- Search data in Google Drive
- Clone data from Google Drive, AppDrive and GDToT
- Archive data from Google Drive, AppDrive and GDToT
- Compress data from Google Drive, AppDrive and GDToT
- Extract data from Google Drive, AppDrive and GDToT
- Count data from Google Drive
- Delete data from Google Drive
- Set data permission in Google Drive
- Size Limit support for Clone, Archive and Extraction tasks
- Size Limit support for Clone, Compression and Extraction tasks
- Destination change support for Clone tasks
- MongoDB support for storing the list of authorized users
- Index Link support
- Multi-token telegraph support
@@ -54,7 +55,7 @@ This list of commands is supported by the bot
start - Start the bot
find - Search data in Google Drive
clone - Clone data to Google Drive
archive - Archive data to Google Drive
compress - Compress data to Google Drive
extract - Extract data to Google Drive
count - Count data from Google Drive
cancel - Cancel a task
8 changes: 4 additions & 4 deletions bot/__init__.py
@@ -151,12 +151,12 @@ def get_config(name: str):
CLONE_LIMIT = None

try:
COMPRESS_LIMIT = get_config('COMPRESS_LIMIT')
if len(COMPRESS_LIMIT) == 0:
ARCHIVE_LIMIT = get_config('ARCHIVE_LIMIT')
if len(ARCHIVE_LIMIT) == 0:
raise KeyError
COMPRESS_LIMIT = float(COMPRESS_LIMIT)
ARCHIVE_LIMIT = float(ARCHIVE_LIMIT)
except:
COMPRESS_LIMIT = None
ARCHIVE_LIMIT = None

try:
TOKEN_JSON_URL = get_config('TOKEN_JSON_URL')
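For context, a hedged sketch of how the renamed limit would be read, assuming get_config() pulls values from the environment; the ARCHIVE_LIMIT name and the float conversion come from the hunk above, and the value 15 is arbitrary:

import os

os.environ['ARCHIVE_LIMIT'] = '15'            # limit in GB; demo value only

raw = os.environ.get('ARCHIVE_LIMIT', '')
ARCHIVE_LIMIT = float(raw) if raw else None   # None disables the limit
print(ARCHIVE_LIMIT)                          # 15.0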
4 changes: 2 additions & 2 deletions bot/__main__.py
@@ -7,7 +7,7 @@
from telegram.ext import CommandHandler

from bot import bot, LOGGER, botStartTime, AUTHORIZED_CHATS, TELEGRAPH, Interval, dispatcher, updater
from bot.modules import auth, cancel, clone, compress, count, delete, eval, list, permission, shell, status
from bot.modules import archive, auth, cancel, clone, count, delete, eval, list, permission, shell, status
from bot.helper.ext_utils.bot_utils import get_readable_file_size, get_readable_time
from bot.helper.ext_utils.fs_utils import start_cleanup, clean_all, exit_clean_up
from bot.helper.telegram_helper.bot_commands import BotCommands
@@ -72,7 +72,7 @@ def restart(update, context):
<br><br>
• <b>/{BotCommands.CloneCommand}</b> &lt;url&gt; &lt;dest_id&gt;: Clone data from Google Drive, AppDrive and GDToT (Destination ID optional)
<br><br>
• <b>/{BotCommands.ArchiveCommand}</b> &lt;url&gt;: Archive data from Google Drive, AppDrive and GDToT
• <b>/{BotCommands.CompressCommand}</b> &lt;url&gt;: Compress data from Google Drive, AppDrive and GDToT
<br><br>
• <b>/{BotCommands.ExtractCommand}</b> &lt;url&gt;: Extract data from Google Drive, AppDrive and GDToT
<br><br>
8 changes: 4 additions & 4 deletions bot/helper/download_utils/gd_downloader.py
@@ -1,7 +1,7 @@
import random
import string

from bot import LOGGER, COMPRESS_LIMIT, download_dict, download_dict_lock
from bot import LOGGER, ARCHIVE_LIMIT, download_dict, download_dict_lock
from bot.helper.drive_utils.gdriveTools import GoogleDriveHelper
from bot.helper.status_utils.download_status import DownloadStatus
from bot.helper.telegram_helper.message_utils import sendMessage, deleteMessage, sendStatusMessage
@@ -17,11 +17,11 @@ def add_gd_download(link, path, listener, customname, is_appdrive, appdict, is_g
return sendMessage(res, listener.bot, listener.message)
if customname:
name = customname
if COMPRESS_LIMIT is not None:
if size > COMPRESS_LIMIT * 1024**3:
if ARCHIVE_LIMIT is not None:
if size > ARCHIVE_LIMIT * 1024**3:
msg2 = f"<b>Name:</b> <code>{name}</code>"
msg2 += f"\n<b>Size:</b> {get_readable_file_size(size)}"
msg2 += f"\n<b>Limit:</b> {COMPRESS_LIMIT} GB"
msg2 += f"\n<b>Limit:</b> {ARCHIVE_LIMIT} GB"
msg2 += "\n\n<b>⚠️ Task failed</b>"
return sendMessage(msg2, listener.bot, listener.message)
LOGGER.info(f"Downloading: {name}")
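The check above treats ARCHIVE_LIMIT as a size in GB and compares it against the download size in bytes by scaling with 1024**3, following the code shown. A tiny worked example with made-up numbers:

ARCHIVE_LIMIT = 10                     # GB, as it would come from the config
size = 12 * 1024**3                    # a 12 GiB folder, in bytes
print(size > ARCHIVE_LIMIT * 1024**3)  # True, so the task is rejected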
19 changes: 9 additions & 10 deletions bot/helper/drive_utils/gdriveTools.py
@@ -432,7 +432,7 @@ def clone(self, link, dest_id):
token_service = self.__alt_authorize()
if token_service is not None:
self.__service = token_service
return self.clone(link, key)
return self.clone(link, dest_id)
msg = "File not found"
else:
msg = err
@@ -591,7 +591,7 @@ def upload(self, file_name: str):
if isinstance(err, RetryError):
LOGGER.info(f"Total attempts: {err.last_attempt.attempt_number}")
err = err.last_attempt.exception()
self.__listener.onUploadError(str(err))
self.__listener.onUploadError(err)
self.__is_errored = True
finally:
self.__updater.cancel()
@@ -724,9 +724,9 @@ def __create_page(self, acc, content):
author_name="Levi",
author_url="https://t.me/l3v11",
html_content=content)['path'])
except RetryAfterError as e:
LOGGER.info(f"Cooldown: {e.retry_after} seconds")
time.sleep(e.retry_after)
except RetryAfterError as err:
LOGGER.info(f"Cooldown: {err.retry_after} seconds")
time.sleep(err.retry_after)
self.__create_page(acc, content)

def __edit_page(self, acc, content, path):
@@ -737,13 +737,12 @@ def __edit_page(self, acc, content, path):
author_name="Levi",
author_url="https://t.me/l3v11",
html_content=content)
except RetryAfterError as e:
LOGGER.info(f"Cooldown: {e.retry_after} seconds")
time.sleep(e.retry_after)
except RetryAfterError as err:
LOGGER.info(f"Cooldown: {err.retry_after} seconds")
time.sleep(err.retry_after)
self.__edit_page(acc, content, path)

def __receive_callback(self, request_id, response, exception):
# request_id = order number of request = shared drive index (1 based)
if exception is not None:
exception = str(exception).replace('>', '').replace('<', '')
LOGGER.error(exception)
@@ -809,7 +808,7 @@ def drive_list(self, file_name):
msg = f'<h4>Query: {file_name}</h4><br>'
add_title_msg = False
msg += f"╾────────────╼<br><b>{DRIVE_NAMES[index]}</b><br>╾────────────╼<br>"
# Detect whether the current entity is a folder or file
# Detect whether the current entity is a folder or a file
for file in self.response[files]["files"]:
if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
msg += f"🗂️<code>{file.get('name')}</code> <b>(folder)</b><br>" \
2 changes: 1 addition & 1 deletion bot/helper/ext_utils/bot_utils.py
@@ -16,7 +16,7 @@ class TaskStatus:
STATUS_CLONING = "Cloning"
STATUS_DOWNLOADING = "Downloading"
STATUS_UPLOADING = "Uploading"
STATUS_ARCHIVING = "Archiving"
STATUS_COMPRESSING = "Compressing"
STATUS_EXTRACTING = "Extracting"

class SetInterval:
2 changes: 1 addition & 1 deletion bot/helper/ext_utils/exceptions.py
@@ -1,4 +1,4 @@
class CompressExceptionHandler(Exception):
class ArchiveExceptionHandler(Exception):
pass

class DDLExceptionHandler(Exception):
6 changes: 3 additions & 3 deletions bot/helper/ext_utils/fs_utils.py
@@ -5,7 +5,7 @@
import sys

from bot import LOGGER, DOWNLOAD_DIR
from bot.helper.ext_utils.exceptions import CompressExceptionHandler
from bot.helper.ext_utils.exceptions import ArchiveExceptionHandler

ARCH_EXT = [".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz", ".lzma2",
".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm", ".arj", ".chm",
@@ -48,7 +48,7 @@ def clean_all():

def exit_clean_up(signal, frame):
try:
LOGGER.info("Cleaning up the downloads and exiting")
LOGGER.info("Cleaning up and exiting")
clean_all()
sys.exit(0)
except KeyboardInterrupt:
@@ -71,7 +71,7 @@ def get_base_name(orig_path: str):
ext = ext[0]
return re.split(ext + '$', orig_path, maxsplit=1, flags=re.I)[0]
else:
raise CompressExceptionHandler('Unsupported file format')
raise ArchiveExceptionHandler("Unsupported archive file")

def get_mime_type(file_path):
mime = magic.Magic(mime=True)
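For reference, a simplified re-creation of the get_base_name() logic whose error message changed above; the ARCH_EXT subset and the sample path are illustrative, and ValueError stands in for ArchiveExceptionHandler so the snippet stays self-contained:

import re

ARCH_EXT = [".tar.gz", ".zip", ".rar"]   # small subset of the list above

def get_base_name(orig_path: str) -> str:
    ext = [e for e in ARCH_EXT if orig_path.lower().endswith(e)]
    if ext:
        return re.split(ext[0] + '$', orig_path, maxsplit=1, flags=re.I)[0]
    raise ValueError("Unsupported archive file")

print(get_base_name("backup.tar.gz"))    # backup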
@@ -4,7 +4,7 @@
from bot.helper.ext_utils.bot_utils import TaskStatus, get_readable_file_size, get_readable_time
from bot.helper.ext_utils.fs_utils import get_path_size

class ArchiveStatus:
class CompressStatus:
def __init__(self, name, size, gid, listener):
self.__name = name
self.__size = size
@@ -49,7 +49,7 @@ def eta(self):
return '-'

def status(self):
return TaskStatus.STATUS_ARCHIVING
return TaskStatus.STATUS_COMPRESSING

def processed_bytes(self):
return get_path_size(f"{DOWNLOAD_DIR}{self.__uid}") - self.__size
@@ -58,7 +58,7 @@ def download(self):
return self

def cancel_task(self):
LOGGER.info(f"Cancelling archive: {self.__name}")
LOGGER.info(f"Cancelling compress: {self.__name}")
if self.__listener.suproc is not None:
self.__listener.suproc.kill()
self.__listener.onUploadError("The archive task has been cancelled")
self.__listener.onUploadError("The compression task has been cancelled")
2 changes: 1 addition & 1 deletion bot/helper/status_utils/extract_status.py
@@ -61,4 +61,4 @@ def cancel_task(self):
LOGGER.info(f"Cancelling extract: {self.__name}")
if self.__listener.suproc is not None:
self.__listener.suproc.kill()
self.__listener.onUploadError("The extract task has been cancelled")
self.__listener.onUploadError("The extraction task has been cancelled")
2 changes: 1 addition & 1 deletion bot/helper/telegram_helper/bot_commands.py
@@ -3,7 +3,7 @@ def __init__(self):
self.StartCommand = 'start'
self.ListCommand = 'find'
self.CloneCommand = 'clone'
self.ArchiveCommand = 'archive'
self.CompressCommand = 'compress'
self.ExtractCommand = 'extract'
self.CountCommand = 'count'
self.CancelCommand = 'cancel'
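A hypothetical sketch of how the renamed command would be wired up. The renamed bot/modules/archive.py is not part of the visible diff, so the callback body and names below are invented; only CommandHandler (imported in __main__.py above) and BotCommands.CompressCommand mirror the code shown:

from telegram.ext import CommandHandler

class BotCommands:                      # trimmed stand-in for the class above
    CompressCommand = 'compress'

def compress_data(update, context):     # placeholder callback
    update.message.reply_text("Compression task started")

compress_handler = CommandHandler(BotCommands.CompressCommand, compress_data)
# dispatcher.add_handler(compress_handler)  # done in the bot's setup code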
24 changes: 12 additions & 12 deletions bot/helper/telegram_helper/message_utils.py
@@ -18,8 +18,8 @@ def sendMessage(text: str, bot, message: Message):
LOGGER.warning(str(r))
time.sleep(r.retry_after * 1.5)
return sendMessage(text, bot, message)
except Exception as e:
LOGGER.error(str(e))
except Exception as err:
LOGGER.error(str(err))
return

def sendMarkup(text: str, bot, message: Message, reply_markup: InlineKeyboardMarkup):
@@ -32,8 +32,8 @@ def sendMarkup(text: str, bot, message: Message, reply_markup: InlineKeyboardMar
LOGGER.warning(str(r))
time.sleep(r.retry_after * 1.5)
return sendMarkup(text, bot, message, reply_markup)
except Exception as e:
LOGGER.error(str(e))
except Exception as err:
LOGGER.error(str(err))
return

def editMessage(text: str, message: Message, reply_markup=None):
@@ -46,16 +46,16 @@ def editMessage(text: str, message: Message, reply_markup=None):
LOGGER.warning(str(r))
time.sleep(r.retry_after * 1.5)
return editMessage(text, message, reply_markup)
except Exception as e:
LOGGER.error(str(e))
return str(e)
except Exception as err:
LOGGER.error(str(err))
return str(err)

def deleteMessage(bot, message: Message):
try:
bot.deleteMessage(chat_id=message.chat.id,
message_id=message.message_id)
except Exception as e:
LOGGER.error(str(e))
except Exception as err:
LOGGER.error(str(err))

def sendLogFile(bot, message: Message):
with open('log.txt', 'rb') as f:
@@ -69,12 +69,12 @@ def delete_all_messages():
try:
deleteMessage(bot, data[0])
del status_reply_dict[data[0].chat.id]
except Exception as e:
LOGGER.error(str(e))
except Exception as err:
LOGGER.error(str(err))

def update_all_messages(force=False):
with status_reply_dict_lock:
if not force and (not status_reply_dict or not Interval or time.time() - list(status_reply_dict.values())[0][1] < 3):
if not status_reply_dict or not Interval or (not force and time.time() - list(status_reply_dict.values())[0][1] < 3):
return
for chat_id in status_reply_dict:
status_reply_dict[chat_id][1] = time.time()
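The last hunk reorders the early-return guard in update_all_messages(): the old form let force=True skip every check, while the new form always bails out when there is nothing to update and only lets force bypass the 3-second throttle. A boolean sketch of the difference, with invented helper names and the missing-Interval case folded into `empty`:

def old_guard(force, empty, recent):
    return not force and (empty or recent)   # force=True skipped every check

def new_guard(force, empty, recent):
    return empty or (not force and recent)   # force=True only skips the throttle

# Nothing to update (empty=True) while force=True:
print(old_guard(True, True, False))          # False, the old code kept going
print(new_guard(True, True, False))          # True, the new code returns early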