Commit
add tons of debug for statistics
olegeech-me committed Nov 18, 2024
1 parent 74448eb commit 3dc5035
Showing 1 changed file with 17 additions and 2 deletions.
19 changes: 17 additions & 2 deletions src/bot/statistics.py
@@ -17,15 +17,18 @@ def _get_period_dates(self, period_days):
            period_days = STATISTICS_PERIOD_DAYS
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=period_days)
        logger.debug(f"Period dates calculated: start_date={start_date}, end_date={end_date}")
        return start_date, end_date

    async def get_general_stats(self, period_days=None):
        """Gather general statistics for each application state"""
        start_date, end_date = self._get_period_dates(period_days)
        applications = await self.db.fetch_application_states_within_period(start_date, end_date)
        logger.debug(f"Fetched {len(applications)} applications within period")

        total_applications = len(applications)
        status_counts = Counter(app['application_state'] for app in applications)
        logger.debug(f"Application status counts: {status_counts}")

        return {
            'total_applications': total_applications,
@@ -40,10 +43,13 @@ async def calculate_average_processing_times(self, period_days=None):
        start_date, end_date = self._get_period_dates(period_days)
        processing_times = await self.db.fetch_processing_times_within_period(start_date, end_date)
        if not processing_times:
            logger.debug("No processing times available")
            return None, {}

        logger.debug(f"Processing times fetched: {processing_times}")
        total_time = sum(app['processing_time'] for app in processing_times)
        overall_average = total_time / len(processing_times)
        logger.debug(f"Overall average processing time: {overall_average} seconds")

        times_by_category = {}
        for app in processing_times:
@@ -55,23 +61,27 @@
            app_type: sum(times) / len(times)
            for app_type, times in times_by_category.items()
        }

        logger.debug(f"Average processing times by category: {average_times_by_category}")
        return overall_average, average_times_by_category

    async def get_common_update_time(self, period_days=None):
        """Get most common hour when MVCR uploads new results"""
        start_date, end_date = self._get_period_dates(period_days)
        hours = await self.db.fetch_status_change_hours_within_period(start_date, end_date)
        if not hours:
            logger.debug("No status change hours available")
            return None

        logger.debug(f"Status change hours fetched: {hours}")
        hour_counts = Counter(hours)
        most_common_hour, _ = hour_counts.most_common(1)[0]
        logger.debug(f"Most common update hour: {most_common_hour}")
        return int(most_common_hour)

    async def predict_user_application_time(self, chat_id, period_days=None):
        """Predict the approval time for the user's applications"""
        applications = await self.db.fetch_user_subscriptions(chat_id)
        logger.debug(f"User {chat_id} has {len(applications)} applications: {applications}")

        # Get average processing times by category
        _overall_avg, avg_times_by_category = await self.calculate_average_processing_times(period_days)
@@ -84,9 +94,14 @@ async def predict_user_application_time(self, chat_id, period_days=None):
            type = app['application_type']
            avg_time = avg_times_by_category.get(type)
            if not avg_time:
                logger.debug(f"No average time available for application type {type}")
                continue  # Skip if no average time is available for this type

            logger.debug(f"Application {app['application_id']} type: {type}, avg_time: {avg_time}")
            time_elapsed = (datetime.utcnow() - app['created_at']).total_seconds()
            estimated_remaining = float(avg_time) - time_elapsed
            logger.debug(
                f"Application {app['application_id']} time_elapsed: {time_elapsed} seconds, "
                f"estimated_remaining: {estimated_remaining} seconds"
            )
            if estimated_remaining > 0:
                predictions.append({
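The new messages are emitted at DEBUG level, so they only appear if the bot's logging is configured to show that level. A minimal sketch using only the standard library; configuring the root logger and the format string shown here are assumptions, not something taken from this repository, which may set up logging in its own entry point instead:

import logging

# Assumption: configure the root logger so that logger.debug(...) calls in
# src/bot/statistics.py (and elsewhere) become visible on stderr.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(name)s %(levelname)s: %(message)s",
)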
