From 97e572329e370b3763c98ecbfd14c8b33ad07130 Mon Sep 17 00:00:00 2001 From: Nikolai Kondrashov Date: Fri, 5 Jul 2024 13:50:14 +0300 Subject: [PATCH] cloud: Load submissions into PostgreSQL only, faster Remove loading submissions into BigQuery until we come up with a way to increase throughput (likely pulling chunks from PostgreSQL). This should help us deal with the backlog in the production submission queue. We might also need to either switch to direct triggering of Cloud Functions by messages from the queue, to reduce latency, or simply switch to a persistent Cloud Run service. Concerns: https://github.com/kernelci/kcidb/issues/541 --- cloud | 9 +-------- kcidb/cloud/functions.sh | 2 +- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/cloud b/cloud index b0a35d9c..9c745c9a 100755 --- a/cloud +++ b/cloud @@ -217,14 +217,7 @@ function execute_command() { fi declare -a -r bigquery_args - declare -r database=$( - echo -n "mux: " - echo -n "$psql_kcidb_db" | escape_whitespace - echo -n " " - echo -n "$bigquery_kcidb_db" | escape_whitespace - echo -n " " - echo -n "$bigquery_sample_kcidb_db" | escape_whitespace - ) + declare -r database="$psql_kcidb_db" if "$test"; then declare -r sqlite_clean_test_file="$TMPDIR/clean.sqlite3" diff --git a/kcidb/cloud/functions.sh b/kcidb/cloud/functions.sh index d5ff271c..27e6a05d 100644 --- a/kcidb/cloud/functions.sh +++ b/kcidb/cloud/functions.sh @@ -228,7 +228,7 @@ function functions_deploy() { --env-vars-file "$env_yaml_file" \ --trigger-topic "${load_queue_trigger_topic}" \ --memory 1024MB \ --max-instances=1 \ +--max-instances=4 \ --timeout 540 # Remove the environment YAML file rm "$env_yaml_file"