From a8621b8404b3cf00800c9709d8afe70aceee9f75 Mon Sep 17 00:00:00 2001 From: jxiao21 Date: Thu, 25 Jul 2024 11:12:42 -0400 Subject: [PATCH 1/7] initial commit with current changes --- config/cron_udp.hjson | 70 +++++++++++++++++++++---------------------- dashboard/cron.py | 4 ++- 2 files changed, 38 insertions(+), 36 deletions(-) diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index fd3beadd..25a6ce69 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -5,7 +5,7 @@ // and it wouldn’t reflect when the data was actually dumped from canvas. // More info on UDP's batch-ingest DAG process can be found here: https://resources.unizin.org/display/UDP/Batch-ingest+application ''' - SELECT * FROM EXTERNAL_QUERY("us.context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); + SELECT * FROM EXTERNAL_QUERY("unizin-shared.context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); ''', "user" : ''' @@ -27,18 +27,18 @@ when cse.role = 'Teacher' then 'TeacherEnrollment' else '' end as enrollment_type - from context_store_entity.course_section_enrollment cse - left join context_store_entity.course_section cs + from unizin-shared.context_store_entity.course_section_enrollment cse + left join unizin-shared.context_store_entity.course_section cs on cse.course_section_id = cs.course_section_id - left join context_store_keymap.course_offering co + left join unizin-shared.context_store_keymap.course_offering co on cs.le_current_course_offering_id = co.id - left join context_store_entity.person p + left join unizin-shared.context_store_entity.person p on cse.person_id = p.person_id - left join context_store_keymap.person p2 + left join unizin-shared.context_store_keymap.person p2 on p.person_id = p2.id - left join context_store_entity.person_email pe + left join unizin-shared.context_store_entity.person_email pe on p.person_id = pe.person_id - left join 
context_store_entity.course_grade cg + left join unizin-shared.context_store_entity.course_grade cg on cse.course_section_id = cg.course_section_id and cse.person_id = cg.person_id where co.lms_int_id IN UNNEST(@course_ids) @@ -51,7 +51,7 @@ ''' with assignment_details as ( select la.due_date, title, la.course_offering_id, la.learner_activity_id, la.points_possible, la.learner_activity_group_id - from context_store_entity.learner_activity la, context_store_keymap.course_offering co + from unizin-shared.context_store_entity.learner_activity la, unizin-shared.context_store_keymap.course_offering co where la.visibility = 'everyone' and la.status = 'published' @@ -59,7 +59,7 @@ and co.lms_int_id IN UNNEST(@course_ids) ), assignment_grp as ( select lg.* - from context_store_entity.learner_activity_group lg, context_store_keymap.course_offering co + from unizin-shared.context_store_entity.learner_activity_group lg, unizin-shared.context_store_keymap.course_offering co where lg.status = 'available' and lg.course_offering_id = co.id @@ -84,7 +84,7 @@ ), assign_rules as ( select distinct ad.learner_activity_group_id, agr.drop_lowest_amount as drop_lowest, agr.drop_highest_amount as drop_highest from grp_full ad - join context_store_entity.learner_activity_group agr + join unizin-shared.context_store_entity.learner_activity_group agr on ad.learner_activity_group_id = agr.learner_activity_group_id ), assignment_grp_points as ( select ag.*, am.group_points AS group_points, ar.drop_lowest as drop_lowest, ar.drop_highest as drop_highest @@ -101,8 +101,8 @@ agp.drop_lowest as drop_lowest, agp.drop_highest as drop_highest from assignment_grp_points agp, - context_store_keymap.course_offering co_km, - context_store_keymap.learner_activity_group lag_km + unizin-shared.context_store_keymap.course_offering co_km, + unizin-shared.context_store_keymap.learner_activity_group lag_km where agp.course_offering_id = co_km.id and agp.learner_activity_group_id = lag_km.id order by id @@ 
-119,10 +119,10 @@ la.points_possible as points_possible, cast(lag_km.lms_int_id as INT64) as assignment_group_id from - context_store_entity.learner_activity la, - context_store_keymap.course_offering co, - context_store_keymap.learner_activity la_km, - context_store_keymap.learner_activity_group lag_km + unizin-shared.context_store_entity.learner_activity la, + unizin-shared.context_store_keymap.course_offering co, + unizin-shared.context_store_keymap.learner_activity la_km, + unizin-shared.context_store_keymap.learner_activity_group lag_km where la.visibility = 'everyone' and la.status = 'published' @@ -145,8 +145,8 @@ cast(0 as boolean) end as consider_weight from - context_store_entity.learner_activity_group lag, - context_store_keymap.course_offering co_km + unizin-shared.context_store_entity.learner_activity_group lag, + unizin-shared.context_store_keymap.course_offering co_km where lag.course_offering_id = co_km.id and co_km.lms_int_id IN UNNEST(@course_ids) @@ -161,8 +161,8 @@ a.le_term_begin_date as date_start, a.le_term_end_date as date_end from - context_store_entity.academic_term as a - left join context_store_keymap.academic_term as ka on ka.id = a.academic_term_id + unizin-shared.context_store_entity.academic_term as a + left join unizin-shared.context_store_keymap.academic_term as ka on ka.id = a.academic_term_id where ka.lms_ext_id is not null order by id @@ -180,10 +180,10 @@ TIMESTAMP(co.le_start_date) as start_at, TIMESTAMP(co.le_end_date) as conclude_at FROM - context_store_entity.course_offering co - LEFT OUTER JOIN context_store_entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), - context_store_keymap.course_offering co2, - context_store_keymap.academic_term at2 + unizin-shared.context_store_entity.course_offering co + LEFT OUTER JOIN unizin-shared.context_store_entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), + unizin-shared.context_store_keymap.course_offering co2, + 
unizin-shared.context_store_keymap.academic_term at2 WHERE co2.lms_int_id IN UNNEST(@course_ids) and co.course_offering_id = co2.id and at1.academic_term_id = at2.id @@ -194,7 +194,7 @@ cast(f_km.lms_int_id as BIGINT) as id, f.status as file_state, f.display_name as display_name - from context_store_entity.file f, context_store_keymap.file f_km, context_store_keymap.course_offering co_km + from unizin-shared.context_store_entity.file f, unizin-shared.context_store_keymap.file f_km, unizin-shared.context_store_keymap.course_offering co_km where f.course_offering_id = co_km.id and f.file_id = f_km.id @@ -207,10 +207,10 @@ ( select distinct cse.person_id as user_id - from context_store_entity.course_section_enrollment cse - left join context_store_entity.course_section cs + from unizin-shared.context_store_entity.course_section_enrollment cse + left join unizin-shared.context_store_entity.course_section cs on cse.course_section_id = cs.course_section_id - left join context_store_keymap.course_offering co + left join unizin-shared.context_store_keymap.course_offering co on cs.le_current_course_offering_id = co.id where co.lms_int_id in UNNEST(@course_ids) @@ -236,14 +236,14 @@ lar.person_id as short_user_id, lar2.lms_int_id as submission_id, CAST(@canvas_data_id_increment AS INT64) + CAST(p.lms_ext_id AS INT64) as canvas_user_id - from context_store_entity.learner_activity_result lar + from unizin-shared.context_store_entity.learner_activity_result lar join enrollment on lar.person_id= enrollment.user_id join enrollment e on lar.person_id = e.user_id - join context_store_keymap.learner_activity_result lar2 on lar.learner_activity_result_id = lar2.id - left join context_store_entity.learner_activity la on lar.learner_activity_id = la.learner_activity_id - left join context_store_keymap.learner_activity la2 on la.learner_activity_id = la2.id - left join context_store_keymap.course_offering co on co.id = la.course_offering_id - join context_store_keymap.person p on p.id = 
lar.person_id + join unizin-shared.context_store_keymap.learner_activity_result lar2 on lar.learner_activity_result_id = lar2.id + left join unizin-shared.context_store_entity.learner_activity la on lar.learner_activity_id = la.learner_activity_id + left join unizin-shared.context_store_keymap.learner_activity la2 on la.learner_activity_id = la2.id + left join unizin-shared.context_store_keymap.course_offering co on co.id = la.course_offering_id + join unizin-shared.context_store_keymap.person p on p.id = lar.person_id where co.lms_int_id in UNNEST(@course_ids) and la.status = 'published' diff --git a/dashboard/cron.py b/dashboard/cron.py index 83cb58d7..b7c194ee 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -52,7 +52,7 @@ def setup_queries(self): def setup_bigquery(self): # Instantiates a client - self.bigquery_client = bigquery.Client() + self.bigquery_client = bigquery.Client(project="udp-umich-prod") # BQ Total Bytes Billed to report to status self.total_bytes_billed = 0 @@ -653,6 +653,7 @@ def do(self) -> str: status += self.update_term() + exception_in_run = False if len(self.valid_locked_course_ids) == 0: logger.info("Skipping course-related table updates...") status += "Skipped course-related table updates.\n" @@ -676,6 +677,7 @@ def do(self) -> str: status += str(e) exception_in_run = True + logger.info("** informational") status += self.update_unizin_metadata() all_str_course_ids = set( From a93b58490914d4bd95d1d784cf6ddddfa4444f45 Mon Sep 17 00:00:00 2001 From: jxiao21 Date: Tue, 30 Jul 2024 10:57:28 -0400 Subject: [PATCH 2/7] env_sample update --- config/env_sample.hjson | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/env_sample.hjson b/config/env_sample.hjson index f367b8db..e5c4c5f7 100644 --- a/config/env_sample.hjson +++ b/config/env_sample.hjson @@ -90,7 +90,7 @@ "ROOT_PASSWORD": "student_dashboard_root_pw" }, # Default Canvas Data id increment for course id, user id, etc - "CANVAS_DATA_ID_INCREMENT": 
17700000000000000, + "CANVAS_DATA_ID_INCREMENT": 1000000000000, # Canvas Configuration "CANVAS_USER": "", # strings for construct file download url From 495f00aff6f5f495f76a402adeede1e116dc9842 Mon Sep 17 00:00:00 2001 From: jxiao21 Date: Wed, 31 Jul 2024 10:35:45 -0400 Subject: [PATCH 3/7] cron works --- dashboard/cron.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dashboard/cron.py b/dashboard/cron.py index b7c194ee..a863a796 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -52,7 +52,8 @@ def setup_queries(self): def setup_bigquery(self): # Instantiates a client - self.bigquery_client = bigquery.Client(project="udp-umich-prod") + # self.bigquery_client = bigquery.Client(project="udp-umich-prod") + self.bigquery_client = bigquery.Client(project="unizin-shared") # BQ Total Bytes Billed to report to status self.total_bytes_billed = 0 From 5d9a1571ade74ba13e7112633cf1fa7150dc5b5a Mon Sep 17 00:00:00 2001 From: "Code Hugger (Matthew Jones)" Date: Tue, 30 Jul 2024 12:57:12 -0400 Subject: [PATCH 4/7] Removing the hardcoded project ids and making it configurable. --- config/cron_udp.hjson | 70 ++++++++++++++++++++--------------------- config/env_sample.hjson | 8 ++++- dashboard/cron.py | 29 ++++++++--------- dashboard/settings.py | 6 ++++ 4 files changed, 63 insertions(+), 50 deletions(-) diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index 25a6ce69..aec0c2ea 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -5,7 +5,7 @@ // and it wouldn’t reflect when the data was actually dumped from canvas.
// More info on UDP's batch-ingest DAG process can be found here: https://resources.unizin.org/display/UDP/Batch-ingest+application ''' - SELECT * FROM EXTERNAL_QUERY("unizin-shared.context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); + SELECT * FROM EXTERNAL_QUERY("context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); ''', "user" : ''' @@ -27,18 +27,18 @@ when cse.role = 'Teacher' then 'TeacherEnrollment' else '' end as enrollment_type - from unizin-shared.context_store_entity.course_section_enrollment cse - left join unizin-shared.context_store_entity.course_section cs + from context_store_entity.course_section_enrollment cse + left join context_store_entity.course_section cs on cse.course_section_id = cs.course_section_id - left join unizin-shared.context_store_keymap.course_offering co + left join context_store_keymap.course_offering co on cs.le_current_course_offering_id = co.id - left join unizin-shared.context_store_entity.person p + left join context_store_entity.person p on cse.person_id = p.person_id - left join unizin-shared.context_store_keymap.person p2 + left join context_store_keymap.person p2 on p.person_id = p2.id - left join unizin-shared.context_store_entity.person_email pe + left join context_store_entity.person_email pe on p.person_id = pe.person_id - left join unizin-shared.context_store_entity.course_grade cg + left join context_store_entity.course_grade cg on cse.course_section_id = cg.course_section_id and cse.person_id = cg.person_id where co.lms_int_id IN UNNEST(@course_ids) @@ -51,7 +51,7 @@ ''' with assignment_details as ( select la.due_date, title, la.course_offering_id, la.learner_activity_id, la.points_possible, la.learner_activity_group_id - from unizin-shared.context_store_entity.learner_activity la, unizin-shared.context_store_keymap.course_offering co + from context_store_entity.learner_activity la, 
context_store_keymap.course_offering co where la.visibility = 'everyone' and la.status = 'published' @@ -59,7 +59,7 @@ and co.lms_int_id IN UNNEST(@course_ids) ), assignment_grp as ( select lg.* - from unizin-shared.context_store_entity.learner_activity_group lg, unizin-shared.context_store_keymap.course_offering co + from context_store_entity.learner_activity_group lg, context_store_keymap.course_offering co where lg.status = 'available' and lg.course_offering_id = co.id @@ -84,7 +84,7 @@ ), assign_rules as ( select distinct ad.learner_activity_group_id, agr.drop_lowest_amount as drop_lowest, agr.drop_highest_amount as drop_highest from grp_full ad - join unizin-shared.context_store_entity.learner_activity_group agr + join context_store_entity.learner_activity_group agr on ad.learner_activity_group_id = agr.learner_activity_group_id ), assignment_grp_points as ( select ag.*, am.group_points AS group_points, ar.drop_lowest as drop_lowest, ar.drop_highest as drop_highest @@ -101,8 +101,8 @@ agp.drop_lowest as drop_lowest, agp.drop_highest as drop_highest from assignment_grp_points agp, - unizin-shared.context_store_keymap.course_offering co_km, - unizin-shared.context_store_keymap.learner_activity_group lag_km + context_store_keymap.course_offering co_km, + context_store_keymap.learner_activity_group lag_km where agp.course_offering_id = co_km.id and agp.learner_activity_group_id = lag_km.id order by id @@ -119,10 +119,10 @@ la.points_possible as points_possible, cast(lag_km.lms_int_id as INT64) as assignment_group_id from - unizin-shared.context_store_entity.learner_activity la, - unizin-shared.context_store_keymap.course_offering co, - unizin-shared.context_store_keymap.learner_activity la_km, - unizin-shared.context_store_keymap.learner_activity_group lag_km + context_store_entity.learner_activity la, + context_store_keymap.course_offering co, + context_store_keymap.learner_activity la_km, + context_store_keymap.learner_activity_group lag_km where la.visibility = 
'everyone' and la.status = 'published' @@ -145,8 +145,8 @@ cast(0 as boolean) end as consider_weight from - unizin-shared.context_store_entity.learner_activity_group lag, - unizin-shared.context_store_keymap.course_offering co_km + context_store_entity.learner_activity_group lag, + context_store_keymap.course_offering co_km where lag.course_offering_id = co_km.id and co_km.lms_int_id IN UNNEST(@course_ids) @@ -161,8 +161,8 @@ a.le_term_begin_date as date_start, a.le_term_end_date as date_end from - unizin-shared.context_store_entity.academic_term as a - left join unizin-shared.context_store_keymap.academic_term as ka on ka.id = a.academic_term_id + context_store_entity.academic_term as a + left join context_store_keymap.academic_term as ka on ka.id = a.academic_term_id where ka.lms_ext_id is not null order by id @@ -180,10 +180,10 @@ TIMESTAMP(co.le_start_date) as start_at, TIMESTAMP(co.le_end_date) as conclude_at FROM - unizin-shared.context_store_entity.course_offering co - LEFT OUTER JOIN unizin-shared.context_store_entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), - unizin-shared.context_store_keymap.course_offering co2, - unizin-shared.context_store_keymap.academic_term at2 + context_store_entity.course_offering co + LEFT OUTER JOIN context_store_entity.academic_term at1 on (co.academic_term_id = at1.academic_term_id), + context_store_keymap.course_offering co2, + context_store_keymap.academic_term at2 WHERE co2.lms_int_id IN UNNEST(@course_ids) and co.course_offering_id = co2.id and at1.academic_term_id = at2.id @@ -194,7 +194,7 @@ cast(f_km.lms_int_id as BIGINT) as id, f.status as file_state, f.display_name as display_name - from unizin-shared.context_store_entity.file f, unizin-shared.context_store_keymap.file f_km, unizin-shared.context_store_keymap.course_offering co_km + from context_store_entity.file f, context_store_keymap.file f_km, context_store_keymap.course_offering co_km where f.course_offering_id = co_km.id and f.file_id = 
f_km.id @@ -207,10 +207,10 @@ ( select distinct cse.person_id as user_id - from unizin-shared.context_store_entity.course_section_enrollment cse - left join unizin-shared.context_store_entity.course_section cs + from context_store_entity.course_section_enrollment cse + left join context_store_entity.course_section cs on cse.course_section_id = cs.course_section_id - left join unizin-shared.context_store_keymap.course_offering co + left join context_store_keymap.course_offering co on cs.le_current_course_offering_id = co.id where co.lms_int_id in UNNEST(@course_ids) @@ -236,14 +236,14 @@ lar.person_id as short_user_id, lar2.lms_int_id as submission_id, CAST(@canvas_data_id_increment AS INT64) + CAST(p.lms_ext_id AS INT64) as canvas_user_id - from unizin-shared.context_store_entity.learner_activity_result lar + from context_store_entity.learner_activity_result lar join enrollment on lar.person_id= enrollment.user_id join enrollment e on lar.person_id = e.user_id - join unizin-shared.context_store_keymap.learner_activity_result lar2 on lar.learner_activity_result_id = lar2.id - left join unizin-shared.context_store_entity.learner_activity la on lar.learner_activity_id = la.learner_activity_id - left join unizin-shared.context_store_keymap.learner_activity la2 on la.learner_activity_id = la2.id - left join unizin-shared.context_store_keymap.course_offering co on co.id = la.course_offering_id - join unizin-shared.context_store_keymap.person p on p.id = lar.person_id + join context_store_keymap.learner_activity_result lar2 on lar.learner_activity_result_id = lar2.id + left join context_store_entity.learner_activity la on lar.learner_activity_id = la.learner_activity_id + left join context_store_keymap.learner_activity la2 on la.learner_activity_id = la2.id + left join context_store_keymap.course_offering co on co.id = la.course_offering_id + join context_store_keymap.person p on p.id = lar.person_id where co.lms_int_id in UNNEST(@course_ids) and la.status = 'published' 
diff --git a/config/env_sample.hjson b/config/env_sample.hjson index e5c4c5f7..90aab7ec 100644 --- a/config/env_sample.hjson +++ b/config/env_sample.hjson @@ -304,5 +304,11 @@ "COURSES_ENABLED": false, # Path to the hjson file contains cron queries - "CRON_QUERY_FILE": "config/cron_udp.hjson" + "CRON_QUERY_FILE": "config/cron_udp.hjson", + + # Change the default Bigquery Project ID + "DEFAULT_PROJECT_ID": "udp-umich-prod", + # Change the dataset project ID where queries are run against + "DATASET_PROJECT_ID": "unizin-shared" + } diff --git a/dashboard/cron.py b/dashboard/cron.py index a863a796..794cf4b9 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -100,24 +100,24 @@ def execute_bq_query(self, query: str, bq_job_config: Optional[bigquery.QueryJob # Remove the newlines from the query query = query.replace("\n", " ") - if bq_job_config: - try: - # Convert to bq schema object - query_job = self.bigquery_client.query(query, job_config=bq_job_config) - query_job_result = query_job.result() + # Create a new QueryJobConfig if none is provided + if bq_job_config is None: + bq_job_config = bigquery.QueryJobConfig() - self.total_bytes_billed += query_job.total_bytes_billed - logger.debug(f"This job had {query_job.total_bytes_billed} bytes. Total: {self.total_bytes_billed}") - return query_job_result - except Exception as e: - logger.error(f"Error ({str(e)}) in setting up schema for query {query}.") - raise Exception(e) - else: - query_job = self.bigquery_client.query(query) + # Add the dataset_project_id connection property to the job config + bq_job_config.connection_properties = [bigquery.ConnectionProperty("dataset_project_id", settings.DATASET_PROJECT_ID)] + + try: + # Convert to bq schema object + query_job = self.bigquery_client.query(query, job_config=bq_job_config) query_job_result = query_job.result() self.total_bytes_billed += query_job.total_bytes_billed logger.debug(f"This job had {query_job.total_bytes_billed} bytes. 
Total: {self.total_bytes_billed}") - return query_job_result + except Exception as e: + logger.error(f"Error ({str(e)}) in setting up schema for query {query}.") + raise Exception(e) + + return query_job_result # Execute a query against the MyLA database def execute_myla_query(self, query: str, params: Optional[Dict] = None) -> ResultProxy: @@ -314,6 +314,7 @@ def update_resource_access(self): 'canvas_event_urls', 'STRING', settings.CANVAS_EVENT_URLS)) job_config = bigquery.QueryJobConfig() job_config.query_parameters = query_params + job_config.connection_properties = [bigquery.ConnectionProperty("dataset_project_id", settings.DATASET_PROJECT_ID)] # Location must match that of the dataset(s) referenced in the query. bq_job = self.bigquery_client.query(final_query, location='US', job_config=job_config) diff --git a/dashboard/settings.py b/dashboard/settings.py index 99acf547..40065672 100644 --- a/dashboard/settings.py +++ b/dashboard/settings.py @@ -449,6 +449,12 @@ def apply_env_overrides(env: Dict[str, Any], environ: os._Environ) -> Dict[str, # Only need view permission for exports IMPORT_EXPORT_EXPORT_PERMISSION_CODE = 'view' +# Change the default project ID for BigQuery if needed (This is typically the one that quotas are run against and logged into) +DEFAULT_PROJECT_ID = ENV.get("DEFAULT_PROJECT_ID", None) + +# Override the default project ID for BigQuery if needed, like to unizin-shared +DATASET_PROJECT_ID = ENV.get("DATASET_PROJECT_ID", None) + # IMPORT LOCAL ENV # ===================== try: From 7976fb9128a2db99734048587daf1678390ae64d Mon Sep 17 00:00:00 2001 From: "Code Hugger (Matthew Jones)" Date: Wed, 31 Jul 2024 11:26:48 -0400 Subject: [PATCH 5/7] Use the value in settings as the default project --- dashboard/cron.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dashboard/cron.py b/dashboard/cron.py index 794cf4b9..b96d4e0c 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -52,8 +52,7 @@ def setup_queries(self): def 
setup_bigquery(self): # Instantiates a client - # self.bigquery_client = bigquery.Client(project="udp-umich-prod") - self.bigquery_client = bigquery.Client(project="unizin-shared") + self.bigquery_client = bigquery.Client(project=settings.DEFAULT_PROJECT_ID) # BQ Total Bytes Billed to report to status self.total_bytes_billed = 0 From 64d5b7bad1e04182d0d87560c139d9a7131cfd4d Mon Sep 17 00:00:00 2001 From: jxiao21 Date: Wed, 31 Jul 2024 16:43:50 -0400 Subject: [PATCH 6/7] added comment explaining why value 1000000000000 is used --- config/env_sample.hjson | 1 + 1 file changed, 1 insertion(+) diff --git a/config/env_sample.hjson b/config/env_sample.hjson index 90aab7ec..4257237d 100644 --- a/config/env_sample.hjson +++ b/config/env_sample.hjson @@ -90,6 +90,7 @@ "ROOT_PASSWORD": "student_dashboard_root_pw" }, # Default Canvas Data id increment for course id, user id, etc + # for Unizin synthetic data, the value is 1000000000000 "CANVAS_DATA_ID_INCREMENT": 1000000000000, # Canvas Configuration "CANVAS_USER": "", From 8d229a631771d8866111da8ac326437e6ed20edc Mon Sep 17 00:00:00 2001 From: "Code Hugger (Matthew Jones)" Date: Wed, 28 Aug 2024 16:28:10 -0400 Subject: [PATCH 7/7] Added back in metadata, added try/except if it fails --- config/cron_udp.hjson | 2 +- dashboard/cron.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/config/cron_udp.hjson b/config/cron_udp.hjson index aec0c2ea..d051b4ef 100644 --- a/config/cron_udp.hjson +++ b/config/cron_udp.hjson @@ -5,7 +5,7 @@ // and it wouldn’t reflect when the data was actually dumped from canvas. 
// More info on UDP's batch-ingest DAG process can be found here: https://resources.unizin.org/display/UDP/Batch-ingest+application ''' - SELECT * FROM EXTERNAL_QUERY("context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); + SELECT * FROM EXTERNAL_QUERY("us.context_store", "select 'canvasdatadate' as pkey, min(dag_run) as pvalue from report.publish_info pi2"); ''', "user" : ''' diff --git a/dashboard/cron.py b/dashboard/cron.py index b96d4e0c..853c83ba 100644 --- a/dashboard/cron.py +++ b/dashboard/cron.py @@ -226,7 +226,10 @@ def update_unizin_metadata(self): logger.debug(metadata_sql) - status += self.util_function(metadata_sql, 'unizin_metadata') + try: + status += self.util_function(metadata_sql, 'unizin_metadata') + except Exception as e: + logger.warn(f"Could not directly access metadata, this is likely just an issue when using synthetic data.") return status @@ -652,6 +655,7 @@ def do(self) -> str: # continue cron tasks + logger.info("** term") status += self.update_term() exception_in_run = False