From 772085626c904dbf06429eed24a01f0a940edc05 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Mon, 10 Jul 2023 18:17:44 +0800 Subject: [PATCH 01/62] fix: move gce networking specific sleep to gce block --- run.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/run.sh b/run.sh index c0c2b1f97..b8ff41be9 100644 --- a/run.sh +++ b/run.sh @@ -2,19 +2,19 @@ # load secrets conditionally if [ -f /tmp/.secrets.env ] - then +then echo '/tmp/.secrets.env file present, loading secrets...'; export $(grep -v '^#' /tmp/.secrets.env | xargs); fi -# wait for networking to be ready before starting Erlang -echo 'Sleeping for 15 seconds...' -sleep 15 - if [[ "$LIBCLUSTER_TOPOLOGY" == "gce" ]] then # run gce specific stuff + # wait for networking to be ready before starting Erlang + echo 'Sleeping for 15 seconds for GCE networking to be ready...' + sleep 15 + sysctl -w net.ipv4.tcp_keepalive_time=60 net.ipv4.tcp_keepalive_intvl=60 net.ipv4.tcp_keepalive_probes=5 export LOGFLARE_NODE_HOST=$(curl \ From fdc36b4d003e9e0c279398634fa9000c29e8517c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Jul 2023 10:03:46 +0000 Subject: [PATCH 02/62] chore(deps): bump semver from 5.7.1 to 5.7.2 in /docs/docs.logflare.com Bumps [semver](https://github.com/npm/node-semver) from 5.7.1 to 5.7.2. - [Release notes](https://github.com/npm/node-semver/releases) - [Changelog](https://github.com/npm/node-semver/blob/v5.7.2/CHANGELOG.md) - [Commits](https://github.com/npm/node-semver/compare/v5.7.1...v5.7.2) --- updated-dependencies: - dependency-name: semver dependency-type: indirect ... Signed-off-by: dependabot[bot] --- docs/docs.logflare.com/package-lock.json | 144 +++++++++++------------ 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/docs/docs.logflare.com/package-lock.json b/docs/docs.logflare.com/package-lock.json index b3b57e839..66e0ecd80 100644 --- a/docs/docs.logflare.com/package-lock.json +++ b/docs/docs.logflare.com/package-lock.json @@ -232,9 +232,9 @@ } }, "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -306,9 +306,9 @@ } }, "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -365,9 +365,9 @@ } }, "node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -1615,9 +1615,9 @@ } }, "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -1827,9 +1827,9 @@ } }, "node_modules/@babel/preset-env/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -2733,9 +2733,9 @@ } }, "node_modules/@mdx-js/mdx/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "bin": { "semver": "bin/semver" } @@ -3951,9 +3951,9 @@ } }, "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -7653,9 +7653,9 @@ } }, "node_modules/make-dir/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -8270,9 +8270,9 @@ } }, "node_modules/package-json/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -9833,9 +9833,9 @@ } }, "node_modules/remark-mdx/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "bin": { "semver": "bin/semver" } @@ -10353,9 +10353,9 @@ } }, "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dependencies": { "lru-cache": "^6.0.0" }, @@ -10378,9 +10378,9 @@ } }, "node_modules/semver-diff/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } @@ -12682,9 +12682,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -12739,9 +12739,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -12782,9 +12782,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -13592,9 +13592,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -13749,9 +13749,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": 
"6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -14436,9 +14436,9 @@ } }, "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" }, "source-map": { "version": "0.5.7", @@ -15388,9 +15388,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -18053,9 +18053,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -18478,9 +18478,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, @@ -19574,9 +19574,9 @@ } }, "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" }, "source-map": { "version": "0.5.7", @@ -19923,9 +19923,9 @@ } }, "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "requires": { "lru-cache": "^6.0.0" } @@ -19939,9 +19939,9 @@ }, "dependencies": { "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, From 7d8b66e420cd58cd04ff8ccb6b1107bd9dd82631 Mon Sep 17 00:00:00 2001 From: Filipe Cabaco Date: Fri, 30 Jun 2023 18:39:57 +0100 Subject: [PATCH 03/62] add - Seed Supabase Mode to use Postgres Ingestion exclusively To simplify Supabase Mode (self hosted) we will move to support ingestion with Postgres and not use BigQuery --- .docker.env | 3 +- config/runtime.exs | 13 +- config/test.exs | 1 + lib/logflare/application.ex | 215 +++++++++--------- lib/logflare/backends.ex | 8 +- .../backends/adaptor/postgres_adaptor/repo.ex | 8 +- .../google/bigquery/gen_utils/gen_utils.ex | 7 +- lib/logflare/logs/logs.ex | 64 ++++-- lib/logflare/single_tenant.ex | 120 +++------- lib/logflare/source/supervisor.ex | 27 +-- lib/logflare/sources.ex | 41 ++-- lib/logflare_web/channels/source_channel.ex | 3 +- lib/logflare_web/live/source_backends_live.ex | 2 +- mix.exs | 3 +- .../adaptor/postgres_adaptor/repo_test.exs | 2 +- test/logflare/backends_test.exs | 4 +- test/logflare/endpoints_test.exs | 2 +- test/logflare/logs/logs_test.exs | 97 ++++---- test/logflare/single_tenant_test.exs | 55 +++-- .../health_check_controller_test.exs | 20 +- test/support/test_utils.ex | 10 - 21 files changed, 351 insertions(+), 354 deletions(-) diff --git a/.docker.env b/.docker.env index 1cf8e6aff..e25582d03 100644 --- a/.docker.env +++ b/.docker.env @@ -11,4 +11,5 @@ LOGFLARE_SUPABASE_MODE=true LOGFLARE_API_KEY=my-cool-api-key-123 GOOGLE_PROJECT_ID=logflare-dev-238720 GOOGLE_PROJECT_NUMBER=1023172132421 -LOGFLARE_GRPC_PORT=50051 \ No newline at end of file +LOGFLARE_GRPC_PORT=50051 +SINGLE_INSTANCE_POSTGRES_URL=postgresql://postgres:postgres@db:5432/logflare_docker \ No newline at end of file diff --git a/config/runtime.exs b/config/runtime.exs index 043ae5180..f05157ae7 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -10,9 +10,10 @@ config :logflare, recaptcha_secret: System.get_env("LOGFLARE_RECAPTCHA_SECRET"), config_cat_sdk_key: System.get_env("LOGFLARE_CONFIG_CAT_SDK_KEY"), single_tenant: System.get_env("LOGFLARE_SINGLE_TENANT"), - supabase_mode: System.get_env("LOGFLARE_SUPABASE_MODE"), + supabase_mode: System.get_env("LOGFLARE_SUPABASE_MODE", "false") == "true", api_key: System.get_env("LOGFLARE_API_KEY"), - cache_stats: System.get_env("LOGFLARE_CACHE_STATS", "false") == "true" + cache_stats: System.get_env("LOGFLARE_CACHE_STATS", "false") == "true", + single_instance_postgres_url: System.get_env("SINGLE_INSTANCE_POSTGRES_URL") ] |> filter_nil_kv_pairs.() @@ -33,8 +34,7 @@ config :logflare, value when is_binary(value) -> String.split(value, ",") end, live_view: - [signing_salt: System.get_env("PHX_LIVE_VIEW_SIGNING_SALT")] - |> filter_nil_kv_pairs.(), + filter_nil_kv_pairs.(signing_salt: System.get_env("PHX_LIVE_VIEW_SIGNING_SALT")), live_dashboard: System.get_env("LOGFLARE_ENABLE_LIVE_DASHBOARD", "false") == "true" ) @@ -186,7 +186,10 @@ config :stripity_stripe, ) if config_env() != :test do - config :goth, json: File.read!("gcloud.json") + if !Application.get_env(:logflare, :supabase_mode) && File.exists?("gcloud.json") do + config :goth, json: File.read!("gcloud.json") + end + config :grpc, port: System.get_env("LOGFLARE_GRPC_PORT", "50051") |> String.to_integer() end diff --git a/config/test.exs b/config/test.exs index 3a003e1a5..641f76c09 100644 --- a/config/test.exs +++ b/config/test.exs @@ -28,4 +28,5 @@ config :logflare, Logflare.Repo, pool_size: 10, pool: Ecto.Adapters.SQL.Sandbox +config :logflare, 
:postgres_backend_adapter, pool_size: 3 config :grpc, start_server: false diff --git a/lib/logflare/application.ex b/lib/logflare/application.ex index 669a57fb0..584a1f85a 100644 --- a/lib/logflare/application.ex +++ b/lib/logflare/application.ex @@ -24,143 +24,107 @@ defmodule Logflare.Application do children = get_children(env) - # See https://hexdocs.pm/elixir/Supervisor.html - # for other strategies and supported options opts = [strategy: :one_for_one, name: Logflare.Supervisor] Supervisor.start_link(children, opts) end - defp get_goth_child_spec() do - # Setup Goth for GCP connections - require Logger - credentials = Jason.decode!(Application.get_env(:goth, :json)) - scopes = ["https://www.googleapis.com/auth/cloud-platform"] - source = {:service_account, credentials, scopes: scopes} - {Goth, name: Logflare.Goth, source: source} + def config_change(changed, _new, removed) do + LogflareWeb.Endpoint.config_change(changed, removed) + :ok + end + + def start_phase(:seed_system, _, env: :test), do: :ok + + def start_phase(:seed_system, _, _) do + startup_tasks() + :ok end defp get_children(:test) do - [ - ContextCache, - Users.Cache, - Sources.Cache, - Billing.Cache, - SourceSchemas.Cache, - PubSubRates.Cache, - Logs.LogEvents.Cache, - Logs.RejectedLogEvents, - {Phoenix.PubSub, name: Logflare.PubSub}, - Logflare.Repo, - # get_goth_child_spec(), - LogflareWeb.Endpoint, - {Task.Supervisor, name: Logflare.TaskSupervisor}, - {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache}, - # v2 ingestion pipelines - {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.SourcesSup}, - {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.RecentLogsSup}, - {DynamicSupervisor, - strategy: :one_for_one, name: Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor}, - {Registry, name: Logflare.Backends.SourceRegistry, keys: :unique}, - {Registry, name: Logflare.Backends.SourceDispatcher, keys: :duplicate} - ] ++ common_children() + cache_children() ++ + [ + Logflare.Repo, + Logs.RejectedLogEvents, + {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, + {Task.Supervisor, name: Logflare.TaskSupervisor} + ] ++ + v2_ingestion_pipeline_children() ++ + common_children() end defp get_children(_) do # Database options for Postgres notifications + + topologies = Application.get_env(:libcluster, :topologies, []) + + cache_children() ++ + [ + Logflare.Repo, + {Task.Supervisor, name: Logflare.TaskSupervisor}, + {Cluster.Supervisor, [topologies, [name: Logflare.ClusterSupervisor]]}, + Logs.RejectedLogEvents, + Sources.Counters, + Sources.RateCounters, + {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, + PubSubRates.Rates, + PubSubRates.Buffers, + PubSubRates.Inserts, + Logflare.Source.Supervisor, + + # If we get a log event and the Source.Supervisor is not up it will 500 + # Monitor system level metrics + Logflare.SystemMetricsSup + ] ++ + get_goth_children() ++ + replication_log_children() ++ + v2_ingestion_pipeline_children() ++ + grpc_children() ++ + conditional_children() ++ + common_children() + end + + defp replication_log_children() do hostname = '#{Application.get_env(:logflare, Logflare.Repo)[:hostname]}' username = Application.get_env(:logflare, Logflare.Repo)[:username] password = Application.get_env(:logflare, Logflare.Repo)[:password] database = Application.get_env(:logflare, Logflare.Repo)[:database] - port = Application.get_env(:logflare, Logflare.Repo)[:port] slot = Application.get_env(:logflare, Logflare.CacheBuster)[:replication_slot] 
publications = Application.get_env(:logflare, Logflare.CacheBuster)[:publications] - topologies = Application.get_env(:libcluster, :topologies, []) - grpc_port = Application.get_env(:grpc, :port) - ssl = Application.get_env(:logflare, :ssl) - grpc_creds = if ssl, do: GRPC.Credential.new(ssl: ssl) - [ - {Task.Supervisor, name: Logflare.TaskSupervisor}, - {Cluster.Supervisor, [topologies, [name: Logflare.ClusterSupervisor]]}, - get_goth_child_spec(), - Logflare.Repo, - {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, - # supervisor(LogflareTelemetry.Supervisor, []), - # Context Caches - ContextCache, - Users.Cache, - Sources.Cache, - Billing.Cache, - SourceSchemas.Cache, - PubSubRates.Cache, - Logs.LogEvents.Cache, - - # Follow Postgresql replication log and bust all our context caches - { - Cainophile.Adapters.Postgres, - register: Logflare.PgPublisher, - epgsql: %{ - host: hostname, - port: port, - username: username, - database: database, - password: password - }, - slot: slot, - wal_position: {"0", "0"}, - publications: publications + opts = [ + register: Logflare.PgPublisher, + epgsql: %{ + host: hostname, + port: port, + username: username, + database: database, + password: password }, - Logflare.CacheBuster, - - # Sources - Logs.RejectedLogEvents, - # init Counters before Supervisof as Supervisor calls Counters through table create - Sources.Counters, - Sources.RateCounters, - PubSubRates.Rates, - PubSubRates.Buffers, - PubSubRates.Inserts, - Logflare.Source.Supervisor, - - # If we get a log event and the Source.Supervisor is not up it will 500 - LogflareWeb.Endpoint, - {GRPC.Server.Supervisor, {LogflareGrpc.Endpoint, grpc_port, cred: grpc_creds}}, - # Monitor system level metrics - Logflare.SystemMetricsSup, - - # For Logflare Endpoints - {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache}, - - # Startup tasks - {Task, fn -> startup_tasks() end}, - - # v2 ingestion pipelines + slot: slot, + wal_position: {"0", "0"}, + publications: publications + ] + + [{Cainophile.Adapters.Postgres, opts}, Logflare.CacheBuster] + end + + defp v2_ingestion_pipeline_children() do + [ {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.SourcesSup}, {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.RecentLogsSup}, {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor}, {Registry, name: Logflare.Backends.SourceRegistry, keys: :unique}, {Registry, name: Logflare.Backends.SourceDispatcher, keys: :duplicate} - ] ++ conditional_children() ++ common_children() + ] end - def conditional_children do + defp conditional_children do config_cat_key = Application.get_env(:logflare, :config_cat_sdk_key) # only add in config cat to multi-tenant prod - if config_cat_key do - [ - {ConfigCat, [sdk_key: config_cat_key]} - ] - else - [] - end - end - - def config_change(changed, _new, removed) do - LogflareWeb.Endpoint.config_change(changed, removed) - :ok + if(config_cat_key, do: [{ConfigCat, [sdk_key: config_cat_key]}], else: []) end defp common_children do @@ -168,10 +132,46 @@ defmodule Logflare.Application do # Finch connection pools, using http2 {Finch, name: Logflare.FinchIngest, pools: %{:default => [protocol: :http2, count: 200]}}, {Finch, name: Logflare.FinchQuery, pools: %{:default => [protocol: :http2, count: 100]}}, - {Finch, name: Logflare.FinchDefault, pools: %{:default => [protocol: :http2, count: 50]}} + {Finch, name: Logflare.FinchDefault, pools: %{:default => [protocol: :http2, count: 50]}}, 
+ LogflareWeb.Endpoint + ] + end + + defp get_goth_children() do + # Setup Goth for GCP connections + case Application.get_env(:logflare, :supabase_mode) do + true -> + [] + + false -> + credentials = Jason.decode!(Application.get_env(:goth, :json)) + scopes = ["https://www.googleapis.com/auth/cloud-platform"] + source = {:service_account, credentials, scopes: scopes} + [{Goth, name: Logflare.Goth, source: source}] + end + end + + defp cache_children() do + [ + ContextCache, + Users.Cache, + Sources.Cache, + Billing.Cache, + SourceSchemas.Cache, + PubSubRates.Cache, + Logs.LogEvents.Cache, + {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache} ] end + defp grpc_children() do + grpc_port = Application.get_env(:grpc, :port) + ssl = Application.get_env(:logflare, :ssl) + grpc_creds = if ssl, do: GRPC.Credential.new(ssl: ssl) + + [{GRPC.Server.Supervisor, {LogflareGrpc.Endpoint, grpc_port, cred: grpc_creds}}] + end + def startup_tasks do # if single tenant, insert enterprise user Logger.info("Executing startup tasks") @@ -189,7 +189,6 @@ defmodule Logflare.Application do # buffer time for all sources to init and create tables # in case of latency. :timer.sleep(3_000) - SingleTenant.update_supabase_source_schemas() end end end diff --git a/lib/logflare/backends.ex b/lib/logflare/backends.ex index 3cfa48fe2..468748f19 100644 --- a/lib/logflare/backends.ex +++ b/lib/logflare/backends.ex @@ -1,7 +1,7 @@ defmodule Logflare.Backends do @moduledoc false - alias Logflare.Backends.Adaptor.WebhookAdaptor alias Logflare.Backends.Adaptor.PostgresAdaptor + alias Logflare.Backends.Adaptor.WebhookAdaptor alias Logflare.Backends.RecentLogs alias Logflare.Backends.RecentLogsSup alias Logflare.Backends.SourceBackend @@ -184,8 +184,8 @@ defmodule Logflare.Backends do @spec start_source_sup(Source.t()) :: :ok | {:error, :already_started} def start_source_sup(%Source{} = source) do case DynamicSupervisor.start_child(SourcesSup, {SourceSup, source}) do - {:ok, _pid} -> :ok - {:error, {:already_started = reason, _pid}} -> {:error, reason} + {:ok, pid} -> {:ok, pid} + {:error, {:already_started, _pid}} -> {:error, :already_started} end end @@ -209,7 +209,7 @@ defmodule Logflare.Backends do :ok | {:error, :already_started} | {:error, :not_started} def restart_source_sup(%Source{} = source) do with :ok <- stop_source_sup(source), - :ok <- start_source_sup(source) do + {:ok, _} <- start_source_sup(source) do :ok end end diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex index eb7a61c04..82e1ea687 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex @@ -38,7 +38,13 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do @spec connect_to_source_backend(Ecto.Repo.t(), SourceBackend.t(), Keyword.t()) :: :ok def connect_to_source_backend(repository_module, %SourceBackend{config: config}, opts \\ []) do unless Process.whereis(repository_module) do - opts = [{:url, config["url"]} | opts] + pool_size = Keyword.get(Application.get_env(:logflare, :postgres_backend_adapter), :pool_size, 10) + + opts = [ + {:url, config["url"] || config.url}, + {:name, repository_module}, + {:pool_size, pool_size} | opts + ] {:ok, _} = DynamicSupervisor.start_child(Supervisor, repository_module.child_spec(opts)) end diff --git a/lib/logflare/google/bigquery/gen_utils/gen_utils.ex b/lib/logflare/google/bigquery/gen_utils/gen_utils.ex index d3479e99e..70a97aa97 100644 --- 
a/lib/logflare/google/bigquery/gen_utils/gen_utils.ex +++ b/lib/logflare/google/bigquery/gen_utils/gen_utils.ex @@ -72,8 +72,9 @@ defmodule Logflare.Google.BigQuery.GenUtils do @typep conn_type :: :ingest | :query | :default @spec get_conn(conn_type()) :: Tesla.Env.client() def get_conn(conn_type \\ :default) do - Goth.fetch(Logflare.Goth) - |> case do + Logflare.Goth + |> Goth.fetch() + |> then(fn {:ok, %Goth.Token{} = goth} -> Connection.new(goth.token) @@ -81,7 +82,7 @@ defmodule Logflare.Google.BigQuery.GenUtils do Logger.error("Goth error!", error_string: inspect(reason)) # This is going to give us an unauthorized connection but we are handling it downstream. Connection.new("") - end + end) # dynamically set tesla adapter |> Map.update!(:adapter, fn _value -> build_tesla_adapter_call(conn_type) end) end diff --git a/lib/logflare/logs/logs.ex b/lib/logflare/logs/logs.ex index d70345777..6c3c54a4a 100644 --- a/lib/logflare/logs/logs.ex +++ b/lib/logflare/logs/logs.ex @@ -2,41 +2,49 @@ defmodule Logflare.Logs do @moduledoc false require Logger - alias Logflare.LogEvent, as: LE - alias Logflare.Logs.{RejectedLogEvents} - alias Logflare.{SystemMetrics, Source, Sources} - alias Logflare.Source.{BigQuery.BufferCounter, RecentLogsServer} - alias Logflare.Logs.SourceRouting - alias Logflare.Logs.IngestTypecasting + alias Logflare.LogEvent alias Logflare.Logs.IngestTransformers - alias Logflare.Source.Supervisor + alias Logflare.Logs.IngestTypecasting + alias Logflare.Logs.RejectedLogEvents + alias Logflare.Logs.SourceRouting alias Logflare.Rule + alias Logflare.Source + alias Logflare.Source.BigQuery.BufferCounter + alias Logflare.Source.RecentLogsServer + alias Logflare.Source.Supervisor + alias Logflare.Sources + alias Logflare.SystemMetrics @spec ingest_logs(list(map), Source.t()) :: :ok | {:error, term} - def ingest_logs(log_params_batch, %Source{rules: rules} = source) when is_list(rules) do + def ingest_logs(log_params_batch, %Source{rules: rules} = source) + when is_list(rules) do log_params_batch |> Enum.map(fn log -> log |> IngestTypecasting.maybe_apply_transform_directives() |> IngestTransformers.transform(:to_bigquery_column_spec) - |> LE.make(%{source: source}) + |> LogEvent.make(%{source: source}) |> maybe_mark_le_dropped_by_lql() |> maybe_ingest_and_broadcast() end) - |> Enum.reduce([], fn le, acc -> - if le.valid do - acc - else - [le.validation_error | acc] - end + |> Enum.reduce([], fn + %{valid: true}, acc -> acc + le, acc -> [le.validation_error | acc] end) - |> case do + |> then(fn [] -> :ok errors when is_list(errors) -> {:error, errors} + end) + end + + def ingest(%LogEvent{source: %Source{} = source} = le) do + case Logflare.SingleTenant.supabase_mode?() do + true -> ingest(:backends, source, le) + false -> ingest(:bigquery, source, le) end end - def ingest(%LE{source: %Source{} = source} = le) do + defp ingest(:bigquery, source, le) do # indvididual source genservers Supervisor.ensure_started(source.token) @@ -49,28 +57,34 @@ defmodule Logflare.Logs do Sources.Counters.increment(source.token) SystemMetrics.AllLogsLogged.increment(:total_logs_logged) - :ok end - def broadcast(%LE{} = le) do + defp ingest(:backends, source, le) do + Logflare.Backends.ingest_logs([le], source) + end + + def broadcast(%LogEvent{} = le) do if le.source.metrics.avg < 5 do Source.ChannelTopics.broadcast_new(le) end end - def maybe_mark_le_dropped_by_lql(%LE{source: %{drop_lql_string: drop_lql_string}} = le) + def maybe_mark_le_dropped_by_lql(%LogEvent{source: %{drop_lql_string: 
drop_lql_string}} = le) when is_nil(drop_lql_string) do le end def maybe_mark_le_dropped_by_lql( - %LE{body: _body, source: %{drop_lql_string: drop_lql_string, drop_lql_filters: filters}} = - le + %LogEvent{ + body: _body, + source: %{drop_lql_string: drop_lql_string, drop_lql_filters: filters} + } = le ) when is_binary(drop_lql_string) do cond do - length(filters) >= 1 && SourceRouting.route_with_lql_rules?(le, %Rule{lql_filters: filters}) -> + length(filters) >= 1 && + SourceRouting.route_with_lql_rules?(le, %Rule{lql_filters: filters}) -> Map.put(le, :drop, true) true -> @@ -78,7 +92,7 @@ defmodule Logflare.Logs do end end - defp maybe_ingest_and_broadcast(%LE{} = le) do + defp maybe_ingest_and_broadcast(%LogEvent{} = le) do cond do le.drop -> le @@ -86,7 +100,7 @@ defmodule Logflare.Logs do le.valid -> le |> tap(&SourceRouting.route_to_sinks_and_ingest/1) - |> LE.apply_custom_event_message() + |> LogEvent.apply_custom_event_message() |> tap(&ingest/1) # use module reference namespace for Mimic mocking |> tap(&__MODULE__.broadcast/1) diff --git a/lib/logflare/single_tenant.ex b/lib/logflare/single_tenant.ex index cec0890c1..b8ec76fe0 100644 --- a/lib/logflare/single_tenant.ex +++ b/lib/logflare/single_tenant.ex @@ -2,17 +2,16 @@ defmodule Logflare.SingleTenant do @moduledoc """ Handles single tenant-related logic """ - alias Logflare.Users + alias Logflare.Backends alias Logflare.Billing alias Logflare.Billing.Plan + alias Logflare.Endpoints alias Logflare.Endpoints.Query alias Logflare.Source - alias Logflare.Sources - alias Logflare.Endpoints alias Logflare.Repo - alias Logflare.Source.Supervisor - alias Logflare.Source.BigQuery.Schema - alias Logflare.LogEvent + alias Logflare.Sources + alias Logflare.Users + require Logger @user_attrs %{ @@ -24,6 +23,7 @@ defmodule Logflare.SingleTenant do provider_uid: "default", endpoints_beta: true } + @plan_attrs %{ name: "Enterprise", period: "year", @@ -50,7 +50,16 @@ defmodule Logflare.SingleTenant do "postgREST.logs.prod", "pgbouncer.logs.prod" ] + @endpoint_params [ + %{ + name: "test", + query: "select body from 'cloudflare.logs.prod'", + sandboxable: true, + max_limit: 1000, + enable_auth: true, + cache_duration_seconds: 0 + }, %{ name: "logs.all", query: @@ -96,9 +105,7 @@ defmodule Logflare.SingleTenant do """ def get_default_plan do Billing.list_plans() - |> Enum.find(fn plan -> - @plan_attrs = plan - end) + |> Enum.find(fn plan -> @plan_attrs = plan end) end @doc """ @@ -121,14 +128,11 @@ defmodule Logflare.SingleTenant do """ @spec create_default_plan() :: {:ok, Plan.t()} | {:error, :already_created} def create_default_plan do - plan = - Billing.list_plans() - |> Enum.find(fn plan -> plan.name == "Enterprise" end) + plan = Billing.list_plans() |> Enum.find(fn plan -> plan.name == "Enterprise" end) - if plan == nil do - Billing.create_plan(@plan_attrs) - else - {:error, :already_created} + case plan do + nil -> Billing.create_plan(@plan_attrs) + _ -> {:error, :already_created} end end @@ -144,7 +148,10 @@ defmodule Logflare.SingleTenant do sources = for name <- @source_names do # creating a source will automatically start the source's RLS process - {:ok, source} = Sources.create_source(%{name: name}, user) + url = Application.get_env(:logflare, :single_instance_postgres_url) + {:ok, source} = Sources.create_source(%{name: name, v2_pipeline: true}, user) + {:ok, _} = Backends.create_source_backend(source, :postgres, %{url: url}) + source end @@ -159,17 +166,16 @@ defmodule Logflare.SingleTenant do Note: not tested as 
`Logflare.Source.Supervisor` is a pain to mock. TODO: add testing for v2 """ - @spec ensure_supabase_sources_started() :: :ok + @spec ensure_supabase_sources_started() :: list() def ensure_supabase_sources_started do user = get_default_user() if user do for source <- Sources.list_sources_by_user(user) do - Supervisor.ensure_started(source.token) + source = Repo.preload(source, :source_backends) + Logflare.Backends.start_source_sup(source) end end - - :ok end @doc """ @@ -202,33 +208,6 @@ defmodule Logflare.SingleTenant do @spec supabase_mode? :: boolean() def supabase_mode?, do: !!Application.get_env(:logflare, :supabase_mode) and single_tenant?() - @doc """ - Adds ingestion samples for supabase sources, so that schema is built and stored correctly. - """ - @spec update_supabase_source_schemas :: nil - def update_supabase_source_schemas do - if supabase_mode?() do - user = get_default_user() - - sources = - Sources.list_sources_by_user(user) - |> Repo.preload(:rules) - - tasks = - for source <- sources do - Task.async(fn -> - source = Sources.refresh_source_metrics_for_ingest(source) - Logger.debug("Updating schemas for for #{source.name}") - event = read_ingest_sample_json(source.name) - log_event = LogEvent.make(event, %{source: source}) - Schema.update(source.token, log_event) - end) - end - - Task.await_many(tasks) - end - end - @doc """ Returns the status of supabase mode setup process. Possible statuses: :ok, nil @@ -241,49 +220,18 @@ defmodule Logflare.SingleTenant do seed_plan = if default_plan, do: :ok seed_sources = - if default_user do - if Sources.list_sources_by_user(default_user) |> length() > 0, do: :ok - end + default_user && + Sources.list_sources_by_user(default_user) + |> Enum.map(&Backends.source_sup_started?/1) + |> Enum.count(& &1) - seed_endpoints = - if default_user do - if Endpoints.list_endpoints_by(user_id: default_user.id) |> length() > 0, do: :ok - end - - source_schemas_updated = if supabase_mode_source_schemas_updated?(), do: :ok + seed_endpoints = default_user && Endpoints.list_endpoints_by(user_id: default_user.id) %{ seed_user: seed_user, seed_plan: seed_plan, - seed_sources: seed_sources, - seed_endpoints: seed_endpoints, - source_schemas_updated: source_schemas_updated + seed_sources: if(seed_sources > 0, do: :ok), + seed_endpoints: if(seed_endpoints > 0, do: :ok) } end - - def supabase_mode_source_schemas_updated? 
do - user = get_default_user() - - if user do - sources = Sources.list_sources_by_user(user) - - checks = - for source <- sources, - source.name in @source_names, - state = Schema.get_state(source.token) do - state.field_count > 3 - end - - Enum.all?(checks) and length(sources) > 0 - else - false - end - end - - # Read a source ingest sample json file - defp read_ingest_sample_json(source_name) do - Application.app_dir(:logflare, "priv/supabase/ingest_samples/#{source_name}.json") - |> File.read!() - |> Jason.decode!() - end end diff --git a/lib/logflare/source/supervisor.ex b/lib/logflare/source/supervisor.ex index ca4047cc0..74c83400c 100644 --- a/lib/logflare/source/supervisor.ex +++ b/lib/logflare/source/supervisor.ex @@ -179,24 +179,25 @@ defmodule Logflare.Source.Supervisor do defp create_source(source_id) do # Double check source is in the database before starting # Can be removed when manager fns move into their own genserver + if !Logflare.SingleTenant.supabase_mode?() do + source = Sources.get_by(token: source_id) - source = Sources.get_by(token: source_id) + if source do + rls = %RLS{source_id: source_id, source: source} - if source do - rls = %RLS{source_id: source_id, source: source} + children = [ + Supervisor.child_spec({RLS, rls}, id: source_id, restart: :transient) + ] - children = [ - Supervisor.child_spec({RLS, rls}, id: source_id, restart: :transient) - ] + # fire off async init in async task, so that bq call does not block. + Tasks.start_child(fn -> init_table(source_id) end) - # fire off async init in async task, so that bq call does not block. - Tasks.start_child(fn -> - init_table(source_id) - end) - - Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 10, max_seconds: 60) + Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 10, max_seconds: 60) + else + {:error, :not_found_in_db} + end else - {:error, :not_found_in_db} + {:error, :supabase_mode} end end diff --git a/lib/logflare/sources.ex b/lib/logflare/sources.ex index e662f6353..b4f1011f4 100644 --- a/lib/logflare/sources.ex +++ b/lib/logflare/sources.ex @@ -5,6 +5,7 @@ defmodule Logflare.Sources do import Ecto.Query, only: [from: 2] + alias Logflare.SingleTenant alias Logflare.Cluster alias Logflare.Google.BigQuery.GenUtils alias Logflare.Google.BigQuery.SchemaUtils @@ -40,29 +41,33 @@ defmodule Logflare.Sources do @spec create_source(map(), User.t()) :: {:ok, Source.t()} | {:error, Ecto.Changeset.t()} def create_source(source_params, user) do - source = - user - |> Ecto.build_assoc(:sources) - |> Source.update_by_user_changeset(source_params) - |> Repo.insert() + user + |> Ecto.build_assoc(:sources) + |> Source.update_by_user_changeset(source_params) + |> Repo.insert() + |> initialize_source(SingleTenant.supabase_mode?()) + end - case source do - {:ok, source} -> - init_schema = SchemaBuilder.initial_table_schema() + defp initialize_source({:ok, source}, true) do + {:ok, source} + end - {:ok, _source_schema} = - SourceSchemas.create_source_schema(source, %{ - bigquery_schema: init_schema, - schema_flat_map: SchemaUtils.bq_schema_to_flat_typemap(init_schema) - }) + defp initialize_source({:ok, source}, false) do + init_schema = SchemaBuilder.initial_table_schema() - Source.Supervisor.start_source(source.token) + {:ok, _source_schema} = + SourceSchemas.create_source_schema(source, %{ + bigquery_schema: init_schema, + schema_flat_map: SchemaUtils.bq_schema_to_flat_typemap(init_schema) + }) - {:ok, source} + Source.Supervisor.start_source(source.token) - {:error, changeset} -> 
- {:error, changeset} - end + {:ok, source} + end + + defp initialize_source({:error, changeset}, _) do + {:error, changeset} end @doc """ diff --git a/lib/logflare_web/channels/source_channel.ex b/lib/logflare_web/channels/source_channel.ex index 2350fb6d4..0a3003f82 100644 --- a/lib/logflare_web/channels/source_channel.ex +++ b/lib/logflare_web/channels/source_channel.ex @@ -22,7 +22,8 @@ defmodule LogflareWeb.SourceChannel do true socket.assigns[:user] -> - Enum.map(socket.assigns[:user].sources, & &1.token) + socket.assigns[:user].sources + |> Enum.map(& &1.token) |> Enum.member?(String.to_existing_atom(source_token)) true -> diff --git a/lib/logflare_web/live/source_backends_live.ex b/lib/logflare_web/live/source_backends_live.ex index 888246e83..0bbd753e8 100644 --- a/lib/logflare_web/live/source_backends_live.ex +++ b/lib/logflare_web/live/source_backends_live.ex @@ -86,7 +86,7 @@ defmodule LogflareWeb.SourceBackendsLive do %{assigns: %{source: source}} = socket socket = - case Logflare.Backends.create_source_backend(source, params["type"], params) do + case Backends.create_source_backend(source, params["type"], params) do {:ok, _} -> assign(socket, :show_create_form, false) diff --git a/mix.exs b/mix.exs index c7a1f8b1e..43d778ed5 100644 --- a/mix.exs +++ b/mix.exs @@ -51,7 +51,8 @@ defmodule Logflare.Mixfile do :crypto, :os_mon ], - included_applications: [:mnesia] + included_applications: [:mnesia], + start_phases: [seed_system: [env: Mix.env()]] ] end diff --git a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs index 9ad3a04d0..882ea0728 100644 --- a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs +++ b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs @@ -52,7 +52,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do Ecto.Migrator.run(repository_module, Repo.migrations(source_backend), :down, all: true) migration_table = Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") - true = repository_module |> Process.whereis() |> Process.exit(:kill) + true = repository_module |> Process.whereis() |> Process.exit(:normal) end) %{repository_module: repository_module} diff --git a/test/logflare/backends_test.exs b/test/logflare/backends_test.exs index 7c37324c7..54d66a59d 100644 --- a/test/logflare/backends_test.exs +++ b/test/logflare/backends_test.exs @@ -91,14 +91,14 @@ defmodule Logflare.BackendsTest do end test "start_source_sup/1, stop_source_sup/1, restart_source_sup/1", %{source: source} do - assert :ok = Backends.start_source_sup(source) + assert {:ok, _} = Backends.start_source_sup(source) assert {:error, :already_started} = Backends.start_source_sup(source) assert :ok = Backends.stop_source_sup(source) assert {:error, :not_started} = Backends.stop_source_sup(source) assert {:error, :not_started} = Backends.restart_source_sup(source) - assert :ok = Backends.start_source_sup(source) + assert {:ok, _} = Backends.start_source_sup(source) assert :ok = Backends.restart_source_sup(source) end end diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index 4ac9fcc89..d0d4662f4 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -228,7 +228,7 @@ defmodule Logflare.EndpointsTest do Ecto.Migrator.run(repository_module, Repo.migrations(source_backend), :down, all: true) migration_table = 
Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") - true = repository_module |> Process.whereis() |> Process.exit(:kill) + true = repository_module |> Process.whereis() |> Process.exit(:normal) end) %{source: source, user: user} diff --git a/test/logflare/logs/logs_test.exs b/test/logflare/logs/logs_test.exs index 73c648ec8..53087e1ee 100644 --- a/test/logflare/logs/logs_test.exs +++ b/test/logflare/logs/logs_test.exs @@ -1,9 +1,11 @@ defmodule Logflare.LogsTest do @moduledoc false use Logflare.DataCase + + alias Logflare.Backends + alias Logflare.LogEvent alias Logflare.Logs alias Logflare.Lql - # v1 pipeline alias Logflare.Source.RecentLogsServer alias Logflare.Sources.Counters alias Logflare.Sources.RateCounters @@ -32,8 +34,7 @@ defmodule Logflare.LogsTest do setup do # mock goth behaviour - Goth - |> stub(:fetch, fn _mod -> {:ok, %Goth.Token{token: "auth-token"}} end) + stub(Goth, :fetch, fn _mod -> {:ok, %Goth.Token{token: "auth-token"}} end) :ok end @@ -42,46 +43,37 @@ defmodule Logflare.LogsTest do describe "ingest input" do test "empty list", %{source: source} do - Logs - |> Mimic.reject(:broadcast, 1) + Mimic.reject(Logs, :broadcast, 1) assert :ok = Logs.ingest_logs([], source) end test "message key gets converted to event_message", %{source: source} do - Logs - |> expect(:broadcast, 1, fn le -> + expect(Logs, :broadcast, 1, fn le -> assert %{"event_message" => "testing 123"} = le.body assert Map.keys(le.body) |> length() == 3 le end) - batch = [ - %{"message" => "testing 123"} - ] + batch = [%{"message" => "testing 123"}] assert :ok = Logs.ingest_logs(batch, source) end test "top level keys", %{source: source} do - batch = [ - %{"event_message" => "testing 123", "other" => 123} - ] + batch = [%{"event_message" => "testing 123", "other" => 123}] assert :ok = Logs.ingest_logs(batch, source) end test "non-map value for metadata key", %{source: source} do - Logs - |> expect(:broadcast, 1, fn le -> + expect(Logs, :broadcast, 1, fn le -> assert %{"metadata" => "some_value"} = le.body le end) - batch = [ - %{"event_message" => "any", "metadata" => "some_value"} - ] + batch = [%{"event_message" => "any", "metadata" => "some_value"}] assert :ok = Logs.ingest_logs(batch, source) end @@ -89,12 +81,11 @@ defmodule Logflare.LogsTest do describe "full ingestion pipeline test" do test "additive schema update from log event", %{source: source} do - GoogleApi.BigQuery.V2.Api.Tabledata - |> expect(:bigquery_tabledata_insert_all, fn conn, - _project_id, - _dataset_id, - _table_name, - opts -> + expect(GoogleApi.BigQuery.V2.Api.Tabledata, :bigquery_tabledata_insert_all, fn conn, + _project_id, + _dataset_id, + _table_name, + opts -> assert {Tesla.Adapter.Finch, :call, [[name: Logflare.FinchIngest, receive_timeout: _]]} = conn.adapter @@ -103,12 +94,11 @@ defmodule Logflare.LogsTest do {:ok, %GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponse{insertErrors: nil}} end) - GoogleApi.BigQuery.V2.Api.Tables - |> expect(:bigquery_tables_patch, fn conn, - _project_id, - _dataset_id, - _table_name, - [body: body] -> + expect(GoogleApi.BigQuery.V2.Api.Tables, :bigquery_tables_patch, fn conn, + _project_id, + _dataset_id, + _table_name, + [body: body] -> # use default config adapter assert conn.adapter == nil schema = body.schema @@ -116,12 +106,9 @@ defmodule Logflare.LogsTest do {:ok, %{}} end) - Logflare.Mailer - |> expect(:deliver, fn _ -> :ok end) + expect(Logflare.Mailer, :deliver, fn _ -> :ok end) - batch 
= [ - %{"event_message" => "testing 123", "key" => "value"} - ] + batch = [%{"event_message" => "testing 123", "key" => "value"}] assert :ok = Logs.ingest_logs(batch, source) :timer.sleep(1_500) @@ -135,19 +122,15 @@ defmodule Logflare.LogsTest do drop_test = insert(:source, user: user, drop_lql_string: "testing", drop_lql_filters: lql_filters) - Logs - |> Mimic.reject(:broadcast, 1) + Mimic.reject(Logs, :broadcast, 1) - batch = [ - %{"event_message" => "testing 123"} - ] + batch = [%{"event_message" => "testing 123"}] assert :ok = Logs.ingest_logs(batch, drop_test) end test "no rules", %{source: source} do - Logs - |> expect(:broadcast, 2, fn le -> le end) + expect(Logs, :broadcast, 2, fn le -> le end) batch = [ %{"event_message" => "routed"}, @@ -161,8 +144,7 @@ defmodule Logflare.LogsTest do insert(:rule, lql_string: "testing", sink: target.token, source_id: source.id) source = source |> Repo.preload(:rules, force: true) - Logs - |> expect(:broadcast, 3, fn le -> le end) + expect(Logs, :broadcast, 3, fn le -> le end) batch = [ %{"event_message" => "not routed"}, @@ -176,8 +158,7 @@ defmodule Logflare.LogsTest do insert(:rule, regex: "routed123", sink: target.token, source_id: source.id) source = source |> Repo.preload(:rules, force: true) - Logs - |> expect(:broadcast, 3, fn le -> le end) + expect(Logs, :broadcast, 3, fn le -> le end) batch = [ %{"event_message" => "not routed"}, @@ -193,10 +174,30 @@ defmodule Logflare.LogsTest do insert(:rule, lql_string: "testing", sink: other_target.token, source_id: target.id) source = source |> Repo.preload(:rules, force: true) - Logs - |> expect(:broadcast, 2, fn le -> le end) + expect(Logs, :broadcast, 2, fn le -> le end) assert :ok = Logs.ingest_logs([%{"event_message" => "testing 123"}], source) end end + + describe "ingest for supabase_mode" do + TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) + + setup do + user = insert(:user) + source = insert(:source, user: user) + {:ok, pid} = Backends.start_source_sup(source) + + on_exit(fn -> + Process.exit(pid, :normal) + end) + + %{source: source} + end + + test "ingest logs into backends", %{source: source} do + le = LogEvent.make(%{body: %{"event_message" => "testing 123"}}, %{source: source}) + :ok = Logs.ingest(le) + end + end end diff --git a/test/logflare/single_tenant_test.exs b/test/logflare/single_tenant_test.exs index 4e5d69a64..25c8c1ac1 100644 --- a/test/logflare/single_tenant_test.exs +++ b/test/logflare/single_tenant_test.exs @@ -1,14 +1,14 @@ defmodule Logflare.SingleTenantTest do - @moduledoc false - use Logflare.DataCase - alias Logflare.SingleTenant + use Logflare.DataCase, async: false + alias Logflare.Billing - alias Logflare.Users - alias Logflare.User alias Logflare.Billing.Plan - alias Logflare.Sources alias Logflare.Endpoints - alias Logflare.Source.BigQuery.Schema + alias Logflare.Repo + alias Logflare.SingleTenant + alias Logflare.Sources + alias Logflare.User + alias Logflare.Users describe "single tenant mode" do TestUtils.setup_single_tenant() @@ -62,25 +62,38 @@ defmodule Logflare.SingleTenantTest do TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) setup do - stub(Schema, :update, fn _token, _le -> :ok end) - :ok + %{username: username, password: password, database: database, hostname: hostname} = + Application.get_env(:logflare, Logflare.Repo) |> Map.new() + + url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" + previous_url = Application.get_env(:logflare, :single_instance_postgres_url) + 
Application.put_env(:logflare, :single_instance_postgres_url, url) + + on_exit(fn -> + Application.put_env(:logflare, :single_instance_postgres_url, previous_url) + end) + + %{url: url} end - test "create_supabase_sources/0, create_supabase_endpoints/0" do - assert {:ok, [_ | _]} = SingleTenant.create_supabase_sources() + test "create_supabase_sources/0, create_supabase_endpoints/0", %{url: url} do + assert {:ok, sources} = SingleTenant.create_supabase_sources() assert {:error, :already_created} = SingleTenant.create_supabase_sources() + assert [url] == + sources + |> Enum.map(&Repo.preload(&1, :source_backends)) + |> Enum.map(fn %{source_backends: [%{config: %{"url" => url}}]} -> url end) + |> Enum.uniq() + # must have sources created first assert {:ok, [_ | _]} = SingleTenant.create_supabase_endpoints() assert {:error, :already_created} = SingleTenant.create_supabase_endpoints() end test "startup tasks inserts log sources/endpoints" do - expect(Schema, :update, 9, fn _source_token, _log_event -> :ok end) - SingleTenant.create_supabase_sources() SingleTenant.create_supabase_endpoints() - SingleTenant.update_supabase_source_schemas() user = SingleTenant.get_default_user() sources = Sources.list_sources_by_user(user) @@ -89,22 +102,20 @@ defmodule Logflare.SingleTenantTest do end test "supabase_mode_status/0" do - stub(Schema, :get_state, fn _ -> %{field_count: 3} end) SingleTenant.create_supabase_sources() + SingleTenant.create_supabase_endpoints() + started = SingleTenant.ensure_supabase_sources_started() |> Enum.map(&elem(&1, 1)) assert %{ seed_user: :ok, seed_plan: :ok, seed_sources: :ok, - seed_endpoints: nil, - source_schemas_updated: nil + seed_endpoints: :ok } = SingleTenant.supabase_mode_status() - stub(Schema, :get_state, fn _ -> %{field_count: 5} end) - - assert %{ - source_schemas_updated: :ok - } = SingleTenant.supabase_mode_status() + on_exit(fn -> + Enum.each(started, &Process.exit(&1, :normal)) + end) end end end diff --git a/test/logflare_web/controllers/health_check_controller_test.exs b/test/logflare_web/controllers/health_check_controller_test.exs index 663f5b40e..0741cdb80 100644 --- a/test/logflare_web/controllers/health_check_controller_test.exs +++ b/test/logflare_web/controllers/health_check_controller_test.exs @@ -3,7 +3,6 @@ defmodule LogflareWeb.HealthCheckControllerTest do For node-level health check only. 
""" use LogflareWeb.ConnCase - alias Logflare.Source.BigQuery.Schema alias Logflare.SingleTenant test "normal node health check", %{conn: conn} do @@ -26,14 +25,29 @@ defmodule LogflareWeb.HealthCheckControllerTest do TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) setup do - stub(Schema, :get_state, fn _ -> %{field_count: 5} end) - :ok + %{username: username, password: password, database: database, hostname: hostname} = + Application.get_env(:logflare, Logflare.Repo) |> Map.new() + + url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" + previous_url = Application.get_env(:logflare, :single_instance_postgres_url) + Application.put_env(:logflare, :single_instance_postgres_url, url) + + on_exit(fn -> + Application.put_env(:logflare, :single_instance_postgres_url, previous_url) + end) + + %{url: url} end test "ok", %{conn: conn} do SingleTenant.create_supabase_sources() SingleTenant.create_supabase_endpoints() + started = SingleTenant.ensure_supabase_sources_started() |> Enum.map(&elem(&1, 1)) assert %{"status" => "ok"} = conn |> get("/health") |> json_response(200) + + on_exit(fn -> + Enum.each(started, &Process.exit(&1, :normal)) + end) end end end diff --git a/test/support/test_utils.ex b/test/support/test_utils.ex index 174e84861..cc27388de 100644 --- a/test/support/test_utils.ex +++ b/test/support/test_utils.ex @@ -28,16 +28,6 @@ defmodule Logflare.TestUtils do quote do setup do - # conditionally update bigquery project id - initial_google_config = Application.get_env(:logflare, Logflare.Google) - replacement_project_id = unquote(opts.bigquery_project_id) - updated = Keyword.put(initial_google_config, :project_id, replacement_project_id) - Application.put_env(:logflare, Logflare.Google, updated) - - on_exit(fn -> - Application.put_env(:logflare, Logflare.Google, initial_google_config) - end) - # perform application env adjustments at runtime initial_single_tenant = Application.get_env(:logflare, :single_tenant) Application.put_env(:logflare, :single_tenant, true) From 614e889706326f31a0534baaad955ca96bfb5624 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Mon, 10 Jul 2023 20:52:24 +0800 Subject: [PATCH 04/62] chore: rename SINGLE_INSTANCE_POSTGRES_URL to POSTGRES_BACKEND_URL --- .docker.env | 2 +- config/runtime.exs | 2 +- lib/logflare/single_tenant.ex | 2 +- test/logflare/single_tenant_test.exs | 6 +++--- .../controllers/health_check_controller_test.exs | 6 +++--- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.docker.env b/.docker.env index e25582d03..93f3e567e 100644 --- a/.docker.env +++ b/.docker.env @@ -12,4 +12,4 @@ LOGFLARE_API_KEY=my-cool-api-key-123 GOOGLE_PROJECT_ID=logflare-dev-238720 GOOGLE_PROJECT_NUMBER=1023172132421 LOGFLARE_GRPC_PORT=50051 -SINGLE_INSTANCE_POSTGRES_URL=postgresql://postgres:postgres@db:5432/logflare_docker \ No newline at end of file +POSTGRES_BACKEND_URL=postgresql://postgres:postgres@db:5432/logflare_docker \ No newline at end of file diff --git a/config/runtime.exs b/config/runtime.exs index f05157ae7..cf29a8800 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -13,7 +13,7 @@ config :logflare, supabase_mode: System.get_env("LOGFLARE_SUPABASE_MODE", "false") == "true", api_key: System.get_env("LOGFLARE_API_KEY"), cache_stats: System.get_env("LOGFLARE_CACHE_STATS", "false") == "true", - single_instance_postgres_url: System.get_env("SINGLE_INSTANCE_POSTGRES_URL") + postgres_backend_url: System.get_env("POSTGRES_BACKEND_URL") ] |> filter_nil_kv_pairs.() diff --git a/lib/logflare/single_tenant.ex 
b/lib/logflare/single_tenant.ex index b8ec76fe0..fb2037236 100644 --- a/lib/logflare/single_tenant.ex +++ b/lib/logflare/single_tenant.ex @@ -148,7 +148,7 @@ defmodule Logflare.SingleTenant do sources = for name <- @source_names do # creating a source will automatically start the source's RLS process - url = Application.get_env(:logflare, :single_instance_postgres_url) + url = Application.get_env(:logflare, :postgres_backend_url) {:ok, source} = Sources.create_source(%{name: name, v2_pipeline: true}, user) {:ok, _} = Backends.create_source_backend(source, :postgres, %{url: url}) diff --git a/test/logflare/single_tenant_test.exs b/test/logflare/single_tenant_test.exs index 25c8c1ac1..8cc16856e 100644 --- a/test/logflare/single_tenant_test.exs +++ b/test/logflare/single_tenant_test.exs @@ -66,11 +66,11 @@ defmodule Logflare.SingleTenantTest do Application.get_env(:logflare, Logflare.Repo) |> Map.new() url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - previous_url = Application.get_env(:logflare, :single_instance_postgres_url) - Application.put_env(:logflare, :single_instance_postgres_url, url) + previous_url = Application.get_env(:logflare, :postgres_backend_url) + Application.put_env(:logflare, :postgres_backend_url, url) on_exit(fn -> - Application.put_env(:logflare, :single_instance_postgres_url, previous_url) + Application.put_env(:logflare, :postgres_backend_url, previous_url) end) %{url: url} diff --git a/test/logflare_web/controllers/health_check_controller_test.exs b/test/logflare_web/controllers/health_check_controller_test.exs index 0741cdb80..31ad66f0b 100644 --- a/test/logflare_web/controllers/health_check_controller_test.exs +++ b/test/logflare_web/controllers/health_check_controller_test.exs @@ -29,11 +29,11 @@ defmodule LogflareWeb.HealthCheckControllerTest do Application.get_env(:logflare, Logflare.Repo) |> Map.new() url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - previous_url = Application.get_env(:logflare, :single_instance_postgres_url) - Application.put_env(:logflare, :single_instance_postgres_url, url) + previous_url = Application.get_env(:logflare, :postgres_backend_url) + Application.put_env(:logflare, :postgres_backend_url, url) on_exit(fn -> - Application.put_env(:logflare, :single_instance_postgres_url, previous_url) + Application.put_env(:logflare, :postgres_backend_url, previous_url) end) %{url: url} From ac9e57b1caf0de97e1b3d6316410e57832839629 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Tue, 11 Jul 2023 16:49:14 +0800 Subject: [PATCH 05/62] chore: adjust PostgresAdaptor.Repo references --- .../backends/adaptor/postgres_adaptor/repo.ex | 3 +- .../postgres_adaptor/pipeline_test.exs | 11 +++---- .../adaptor/postgres_adaptor/repo_test.exs | 32 ++++++++++++------- test/logflare/endpoints_test.exs | 10 +++--- 4 files changed, 33 insertions(+), 23 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex index 82e1ea687..64d718866 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex @@ -38,7 +38,8 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do @spec connect_to_source_backend(Ecto.Repo.t(), SourceBackend.t(), Keyword.t()) :: :ok def connect_to_source_backend(repository_module, %SourceBackend{config: config}, opts \\ []) do unless Process.whereis(repository_module) do - pool_size = Keyword.get(Application.get_env(:logflare, :postgres_backend_adapter), 
:pool_size, 10) + pool_size = + Keyword.get(Application.get_env(:logflare, :postgres_backend_adapter), :pool_size, 10) opts = [ {:url, config["url"] || config.url}, diff --git a/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs index 5c49ac3a6..23d2be0b6 100644 --- a/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs +++ b/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs @@ -5,7 +5,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PipelineTest do alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo alias Logflare.Buffers.MemoryBuffer import Ecto.Query @@ -21,7 +20,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PipelineTest do source_backend = insert(:source_backend, type: :postgres, config: %{"url" => url}, source: source) - repository_module = Repo.new_repository_for_source_backend(source_backend) + repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) pipeline_name = Backends.via_source_backend(source_backend, Pipeline) memory_buffer_pid = start_supervised!(MemoryBuffer) @@ -35,15 +34,15 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PipelineTest do } :ok = - Repo.connect_to_source_backend(repository_module, source_backend, + PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, pool: Ecto.Adapters.SQL.Sandbox ) Ecto.Adapters.SQL.Sandbox.mode(repository_module, :auto) - :ok = Repo.create_log_event_table(repository_module, source_backend) + :ok = PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) on_exit(fn -> - Ecto.Migrator.run(repository_module, Repo.migrations(source_backend), :down, all: true) + Ecto.Migrator.run(repository_module, PostgresAdaptor.Repo.migrations(source_backend), :down, all: true) migration_table = Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") true = repository_module |> Process.whereis() |> Process.exit(:kill) @@ -75,7 +74,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PipelineTest do fetcher = fn -> repository_module.all( - from(l in Repo.table_name(source_backend), + from(l in PostgresAdaptor.Repo.table_name(source_backend), select: %LogEvent{ id: l.id, body: l.body, diff --git a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs index 882ea0728..4eafa33ee 100644 --- a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs +++ b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs @@ -1,8 +1,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do use Logflare.DataCase, async: false - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo - alias Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent + alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest.BadMigration import Ecto.Query @@ -25,12 +24,12 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do describe "new_repository_for_source_backend/1" do test "creates a new Ecto.Repo for given source_backend", %{source_backend: source_backend} do - repository_module = Repo.new_repository_for_source_backend(source_backend) + repository_module = 
PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) assert Keyword.get(repository_module.__info__(:attributes), :behaviour) == [Ecto.Repo] end test "name of the module uses source_id", %{source_backend: source_backend} do - repository_module = Repo.new_repository_for_source_backend(source_backend) + repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) assert repository_module == Module.concat([Logflare.Repo.Postgres, "Adaptor#{source_backend.source.token}"]) @@ -39,17 +38,23 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do describe "create_log_event_table/1" do setup %{source_backend: source_backend} do - repository_module = Repo.new_repository_for_source_backend(source_backend) + repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) :ok = - Repo.connect_to_source_backend(repository_module, source_backend, + PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, pool: Ecto.Adapters.SQL.Sandbox ) Ecto.Adapters.SQL.Sandbox.mode(repository_module, :auto) on_exit(fn -> - Ecto.Migrator.run(repository_module, Repo.migrations(source_backend), :down, all: true) + Ecto.Migrator.run( + repository_module, + PostgresAdaptor.Repo.migrations(source_backend), + :down, + all: true + ) + migration_table = Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") true = repository_module |> Process.whereis() |> Process.exit(:normal) @@ -62,8 +67,13 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do source_backend: source_backend, repository_module: repository_module } do - assert Repo.create_log_event_table(repository_module, source_backend) == :ok - query = from(l in Repo.table_name(source_backend), select: LogEvent) + assert PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) == :ok + + query = + from(l in PostgresAdaptor.Repo.table_name(source_backend), + select: PostgresAdaptor.LogEvent + ) + assert repository_module.all(query) == [] end @@ -73,11 +83,11 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do } do bad_migrations = [{0, BadMigration}] - assert Repo.create_log_event_table( + assert {:error, :failed_migration} = PostgresAdaptor.Repo.create_log_event_table( repository_module, source_backend, bad_migrations - ) == {:error, :failed_migration} + ) end end diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index d0d4662f4..0a2b72328 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -3,7 +3,7 @@ defmodule Logflare.EndpointsTest do alias Logflare.Endpoints alias Logflare.Endpoints.Query - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo + alias Logflare.Backends.Adaptor.PostgresAdaptor test "list_endpoints_by" do %{id: id, name: name} = insert(:endpoint) @@ -215,17 +215,17 @@ defmodule Logflare.EndpointsTest do source: source ) - repository_module = Repo.new_repository_for_source_backend(source_backend) + repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) :ok = - Repo.connect_to_source_backend(repository_module, source_backend, + PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, pool: Ecto.Adapters.SQL.Sandbox ) - :ok = Repo.create_log_event_table(repository_module, source_backend) + :ok = PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) on_exit(fn 
-> - Ecto.Migrator.run(repository_module, Repo.migrations(source_backend), :down, all: true) + Ecto.Migrator.run(repository_module, PostgresAdaptor.Repo.migrations(source_backend), :down, all: true) migration_table = Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") true = repository_module |> Process.whereis() |> Process.exit(:normal) From 2531c5a9b7b954d7d08e8f3fa79253d5a80f597b Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 02:27:55 +0800 Subject: [PATCH 06/62] chore: organize and simplify tests --- lib/logflare/backends/adaptor.ex | 10 +- .../backends/adaptor/postgres_adaptor.ex | 68 +++++++++-- .../backends/adaptor/postgres_adaptor/repo.ex | 3 +- .../postgres_adaptor/pipeline_test.exs | 109 ------------------ .../adaptor/postgres_adaptor/repo_test.exs | 11 +- .../backends/postgres_adaptor_test.exs | 52 +++++++++ test/logflare/endpoints_test.exs | 8 +- test/support/factory.ex | 17 ++- 8 files changed, 148 insertions(+), 130 deletions(-) delete mode 100644 test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs create mode 100644 test/logflare/backends/postgres_adaptor_test.exs diff --git a/lib/logflare/backends/adaptor.ex b/lib/logflare/backends/adaptor.ex index 15d3cdc28..d2eb927f5 100644 --- a/lib/logflare/backends/adaptor.ex +++ b/lib/logflare/backends/adaptor.ex @@ -20,7 +20,8 @@ defmodule Logflare.Backends.Adaptor do @doc """ Queries the backend using an endpoint query. """ - @callback execute_query(identifier(), [%Query{}]) :: {:ok, term()} | {:error, :not_queryable} + @typep query :: Query.t() | Ecto.Query.t() | String.t() + @callback execute_query(identifier(), query()) :: {:ok, [term()]} | {:error, :not_queryable} @doc """ Typecasts config params. @@ -34,10 +35,12 @@ defmodule Logflare.Backends.Adaptor do defmacro __using__(_opts) do quote do - @behaviour Adaptor + @behaviour Logflare.Backends.Adaptor + @impl true def queryable?(), do: false + @impl true def execute_query(_pid, _query) do if function_exported?(__MODULE__, :queryable, 0) do raise "queryable?/0 callback implemented but query execution callback has not been implemented yet!"
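A minimal sketch of a third-party adaptor written against only the callbacks visible in this hunk; the module name and config shape are hypothetical, and the behaviour may define callbacks this hunk does not show:

defmodule MyApp.NoopAdaptor do
  @behaviour Logflare.Backends.Adaptor

  # Not queryable, so query execution refuses outright.
  @impl true
  def queryable?, do: false

  @impl true
  def execute_query(_pid, _query), do: {:error, :not_queryable}

  # Accept and discard each batch of log events.
  @impl true
  def ingest(_pid, _log_events), do: :ok

  # Schemaless changeset over the expected config keys.
  @impl true
  def cast_config(params),
    do: Ecto.Changeset.cast({%{}, %{url: :string}}, params, [:url])

  @impl true
  def validate_config(changeset),
    do: Ecto.Changeset.validate_required(changeset, [:url])
end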
@@ -46,11 +49,14 @@ defmodule Logflare.Backends.Adaptor do end end + @impl true def ingest(_pid, _log_events), do: raise("Ingest callback not implemented!") + @impl true def validate_config(_config_changeset), do: raise("Config validation callback not implemented!") + @impl true def cast_config(_config), do: raise("Config casting callback not implemented!") def cast_and_validate_config(params) do diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 1f25c8f71..475728171 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -18,13 +18,14 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do use GenServer use TypedStruct use Logflare.Backends.Adaptor + alias Logflare.Backends.Adaptor + @behaviour Logflare.Backends.Adaptor alias Logflare.Backends alias Logflare.Backends.SourceBackend alias Logflare.Backends.SourceDispatcher alias Logflare.Buffers.MemoryBuffer alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo typedstruct enforce: true do field(:buffer_module, Adaptor.t()) @@ -44,9 +45,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do with source_id <- source_backend.source_id, {:ok, _} <- Registry.register(SourceDispatcher, source_id, {__MODULE__, :ingest}), {:ok, buffer_pid} <- MemoryBuffer.start_link([]), - repository_module <- Repo.new_repository_for_source_backend(source_backend), - :ok <- Repo.connect_to_source_backend(repository_module, source_backend), - :ok <- Repo.create_log_event_table(repository_module, source_backend) do + repository_module <- __MODULE__.Repo.new_repository_for_source_backend(source_backend), + :ok <- __MODULE__.Repo.connect_to_source_backend(repository_module, source_backend), + :ok <- __MODULE__.Repo.create_log_event_table(repository_module, source_backend) do state = %__MODULE__{ buffer_module: MemoryBuffer, buffer_pid: buffer_pid, @@ -61,26 +62,79 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end end - @impl true + @impl Logflare.Backends.Adaptor def ingest(pid, log_events), do: GenServer.call(pid, {:ingest, log_events}) - @impl true + @impl Adaptor def cast_config(params) do {%{}, %{url: :string}} |> Ecto.Changeset.cast(params, [:url]) end - @impl true + @impl Adaptor def validate_config(changeset) do changeset |> Ecto.Changeset.validate_required([:url]) |> Ecto.Changeset.validate_format(:url, ~r/postgresql?\:\/\/.+/) end + @impl Adaptor + def queryable?(), do: true + + @impl Adaptor + def execute_query(pid, query) do + GenServer.call(pid, {:execute_query, query}) + end + + @doc """ + Rolls back all migrations + """ + @spec rollback_migrations(SourceBackend.t()) :: :ok + def rollback_migrations(source_backend) do + repository_module = __MODULE__.Repo.new_repository_for_source_backend(source_backend) + + Ecto.Migrator.run( + repository_module, + __MODULE__.Repo.migrations(source_backend), + :down, + all: true + ) + + :ok + # GenServer.call(pid, :rollback_migrations) + end + + @doc """ + Drops the migration table + """ + @spec drop_migrations_table(SourceBackend.t()) :: :ok + def drop_migrations_table(source_backend) do + repository_module = __MODULE__.Repo.new_repository_for_source_backend(source_backend) + migrations_table = migrations_table_name(source_backend) + Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migrations_table}") + :ok + # GenServer.call(pid, :drop_migrations_table) + end + + @doc """ + Returns the 
migrations table name used for a given source + """ + @spec migrations_table_name(SourceBackend.t()) :: String.t() + def migrations_table_name(%SourceBackend{source_id: source_id}) do + "schema_migrations_#{source_id}" + end + # GenServer @impl true def handle_call({:ingest, log_events}, _from, %{config: _config} = state) do MemoryBuffer.add_many(state.buffer_pid, log_events) {:reply, :ok, state} end + + @impl true + def handle_call({:execute_query, %Ecto.Query{select: select} = query}, _from, state) do + mod = state.repository_module + result = mod.all(query) + {:reply, result, state} + end end diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex index 64d718866..e3263ffed 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex @@ -7,6 +7,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do """ alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo.Migrations.AddLogEvents alias Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor + alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.SourceBackend alias Logflare.Repo alias Logflare.Source @@ -29,7 +30,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do _ -> {:module, _, _, _} = Module.create(name, @ast, Macro.Env.location(__ENV__)) end - migration_table = "schema_migrations_#{source_backend.source_id}" + migration_table = PostgresAdaptor.migrations_table_name(source_backend) Application.put_env(:logflare, name, migration_source: migration_table) name diff --git a/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs deleted file mode 100644 index 23d2be0b6..000000000 --- a/test/logflare/backends/adaptor/postgres_adaptor/pipeline_test.exs +++ /dev/null @@ -1,109 +0,0 @@ -defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PipelineTest do - use Logflare.DataCase, async: false - - alias Logflare.Backends - alias Logflare.Backends.Adaptor.PostgresAdaptor - alias Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent - alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline - alias Logflare.Buffers.MemoryBuffer - - import Ecto.Query - - setup do - %{username: username, password: password, database: database, hostname: hostname} = - Application.get_env(:logflare, Logflare.Repo) |> Map.new() - - url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - - source = insert(:source, user: insert(:user)) - - source_backend = - insert(:source_backend, type: :postgres, config: %{"url" => url}, source: source) - - repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) - pipeline_name = Backends.via_source_backend(source_backend, Pipeline) - memory_buffer_pid = start_supervised!(MemoryBuffer) - - state = %PostgresAdaptor{ - buffer_module: MemoryBuffer, - buffer_pid: memory_buffer_pid, - config: source_backend.config, - source_backend: source_backend, - pipeline_name: pipeline_name, - repository_module: repository_module - } - - :ok = - PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, - pool: Ecto.Adapters.SQL.Sandbox - ) - - Ecto.Adapters.SQL.Sandbox.mode(repository_module, :auto) - :ok = PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) - - on_exit(fn -> - Ecto.Migrator.run(repository_module, PostgresAdaptor.Repo.migrations(source_backend), :down, all: true) - migration_table = 
Keyword.get(repository_module.config(), :migration_source) - Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") - true = repository_module |> Process.whereis() |> Process.exit(:kill) - end) - - {:ok, _} = Pipeline.start_link(state) - - %{ - memory_buffer_pid: memory_buffer_pid, - repository_module: repository_module, - source_backend: source_backend - } - end - - describe "postgres ingestion" do - test "ingests dispatched message", %{ - memory_buffer_pid: memory_buffer_pid, - repository_module: repository_module, - source_backend: source_backend - } do - log_event = - build(:log_event, - token: TestUtils.random_string(), - source: source_backend.source, - body: %{"data" => "data"} - ) - - MemoryBuffer.add(memory_buffer_pid, log_event) - - fetcher = fn -> - repository_module.all( - from(l in PostgresAdaptor.Repo.table_name(source_backend), - select: %LogEvent{ - id: l.id, - body: l.body, - event_message: l.event_message, - timestamp: l.timestamp - } - ) - ) - end - - asserts = fn - [] -> - :retry - - [res_log_event] -> - assert log_event.id == res_log_event.id - assert log_event.body == res_log_event.body - assert log_event.body["event_message"] == res_log_event.event_message - - expected_timestamp = - res_log_event.timestamp - |> DateTime.from_naive!("Etc/UTC") - |> DateTime.to_unix(:microsecond) - - assert log_event.body["timestamp"] == expected_timestamp - end - - TestUtils.retry_fetch(fetcher, asserts) - assert_received(:done) - end - end -end diff --git a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs index 4eafa33ee..4a7825439 100644 --- a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs +++ b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs @@ -83,11 +83,12 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do } do bad_migrations = [{0, BadMigration}] - assert {:error, :failed_migration} = PostgresAdaptor.Repo.create_log_event_table( - repository_module, - source_backend, - bad_migrations - ) + assert {:error, :failed_migration} = + PostgresAdaptor.Repo.create_log_event_table( + repository_module, + source_backend, + bad_migrations + ) end end diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs new file mode 100644 index 000000000..e80919e83 --- /dev/null +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -0,0 +1,52 @@ +defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do + use Logflare.DataCase, async: false + + alias Logflare.Backends.Adaptor.PostgresAdaptor + + import Ecto.Query + + setup do + repo = Application.get_env(:logflare, Logflare.Repo) + + url = + "postgresql://#{repo[:username]}:#{repo[:password]}@#{repo[:hostname]}/#{repo[:database]}" + + config = %{ + "url" => url + } + source = insert(:source, user: insert(:user) ) + source_backend = insert(:source_backend, type: :postgres, source: source, config: config) + pid = start_supervised!({PostgresAdaptor, source_backend}) + + on_exit(fn -> + PostgresAdaptor.rollback_migrations(source_backend) + PostgresAdaptor.drop_migrations_table(source_backend) + end) + + %{pid: pid, source_backend: source_backend} + end + + test "ingest/2 and execute_query/2 dispatched message", %{pid: pid, source_backend: source_backend} do + log_event = + build(:log_event, + source: source_backend.source, + test: "data" + ) + + assert :ok = PostgresAdaptor.ingest(pid, [log_event]) + + # TODO: replace with a timeout retry func + 
:timer.sleep(1_500) + + query = + from(l in PostgresAdaptor.Repo.table_name(source_backend), + select: l.body + ) + + assert [ + %{ + "test" => "data" + } + ] = PostgresAdaptor.execute_query(pid, query) + end +end diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index 0a2b72328..832a8d79b 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -225,7 +225,13 @@ defmodule Logflare.EndpointsTest do :ok = PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) on_exit(fn -> - Ecto.Migrator.run(repository_module, PostgresAdaptor.Repo.migrations(source_backend), :down, all: true) + Ecto.Migrator.run( + repository_module, + PostgresAdaptor.Repo.migrations(source_backend), + :down, + all: true + ) + migration_table = Keyword.get(repository_module.config(), :migration_source) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") true = repository_module |> Process.whereis() |> Process.exit(:normal) diff --git a/test/support/factory.ex b/test/support/factory.ex index 9539ed20c..a25c53876 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -94,11 +94,18 @@ defmodule Logflare.Factory do def log_event_factory(attrs) do {source, params} = Map.pop(attrs, :source) - params = %{ - "message" => params["message"] || params[:message] || "test-msg", - "timestamp" => params["timestamp"] || params[:timestamp] || DateTime.utc_now() |> to_string, - "metadata" => params["metadata"] || params[:metadata] || %{} - } + params = + Map.merge( + params, + %{ + "message" => + params["message"] || params["event_message"] || params[:message] || "test-msg", + "timestamp" => + params["timestamp"] || params[:timestamp] || DateTime.utc_now() |> to_string, + "metadata" => params["metadata"] || params[:metadata] || %{} + } + ) + |> Map.drop([:metadata, :event_message, :message, :timestamp]) LogEvent.make(params, %{source: source}) end From 32c8ea028bf173de0be225d3f5b764e181b24e5d Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 03:05:51 +0800 Subject: [PATCH 07/62] chore: remove postgres backend url --- .docker.env | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.docker.env b/.docker.env index 93f3e567e..1cf8e6aff 100644 --- a/.docker.env +++ b/.docker.env @@ -11,5 +11,4 @@ LOGFLARE_SUPABASE_MODE=true LOGFLARE_API_KEY=my-cool-api-key-123 GOOGLE_PROJECT_ID=logflare-dev-238720 GOOGLE_PROJECT_NUMBER=1023172132421 -LOGFLARE_GRPC_PORT=50051 -POSTGRES_BACKEND_URL=postgresql://postgres:postgres@db:5432/logflare_docker \ No newline at end of file +LOGFLARE_GRPC_PORT=50051 \ No newline at end of file From f075a03f73958a841443b05079d9e695cac7e4cc Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 03:08:49 +0800 Subject: [PATCH 08/62] chore: revert runtime.exs changes --- config/runtime.exs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/config/runtime.exs b/config/runtime.exs index cf29a8800..055368699 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -10,10 +10,9 @@ config :logflare, recaptcha_secret: System.get_env("LOGFLARE_RECAPTCHA_SECRET"), config_cat_sdk_key: System.get_env("LOGFLARE_CONFIG_CAT_SDK_KEY"), single_tenant: System.get_env("LOGFLARE_SINGLE_TENANT"), - supabase_mode: System.get_env("LOGFLARE_SUPABASE_MODE", "false") == "true", + supabase_mode: System.get_env("LOGFLARE_SUPABASE_MODE"), api_key: System.get_env("LOGFLARE_API_KEY"), - cache_stats: System.get_env("LOGFLARE_CACHE_STATS", "false") == "true", 
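# Aside: `filter_nil_kv_pairs` is defined earlier in runtime.exs, outside this
# hunk. Judging from its usage it presumably drops keyword pairs whose value is
# nil, so unset env vars fall back to compile-time defaults instead of
# overriding them with nil. A minimal sketch under that assumption:
filter_nil_kv_pairs = fn pairs ->
  Enum.filter(pairs, fn {_key, value} -> not is_nil(value) end)
end

# e.g. filter_nil_kv_pairs.(api_key: nil, single_tenant: "true")
# #=> [single_tenant: "true"]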
- postgres_backend_url: System.get_env("POSTGRES_BACKEND_URL") + cache_stats: System.get_env("LOGFLARE_CACHE_STATS", "false") == "true" ] |> filter_nil_kv_pairs.() @@ -34,7 +33,8 @@ config :logflare, value when is_binary(value) -> String.split(value, ",") end, live_view: - filter_nil_kv_pairs.(signing_salt: System.get_env("PHX_LIVE_VIEW_SIGNING_SALT")), + [signing_salt: System.get_env("PHX_LIVE_VIEW_SIGNING_SALT")] + |> filter_nil_kv_pairs.(), live_dashboard: System.get_env("LOGFLARE_ENABLE_LIVE_DASHBOARD", "false") == "true" ) @@ -186,9 +186,7 @@ config :stripity_stripe, ) if config_env() != :test do - if !Application.get_env(:logflare, :supabase_mode) && File.exists?("gcloud.json") do - config :goth, json: File.read!("gcloud.json") - end + config :goth, json: File.read!("gcloud.json") config :grpc, port: System.get_env("LOGFLARE_GRPC_PORT", "50051") |> String.to_integer() end From ba4bc37ec0e7fe348efa228889d7569d48d3c2e7 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 03:08:54 +0800 Subject: [PATCH 09/62] chore: formatting --- test/logflare/backends/postgres_adaptor_test.exs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index e80919e83..6f8c06c3d 100644 --- a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -14,7 +14,8 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do config = %{ "url" => url } - source = insert(:source, user: insert(:user) ) + + source = insert(:source, user: insert(:user)) source_backend = insert(:source_backend, type: :postgres, source: source, config: config) pid = start_supervised!({PostgresAdaptor, source_backend}) @@ -26,7 +27,10 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do %{pid: pid, source_backend: source_backend} end - test "ingest/2 and execute_query/2 dispatched message", %{pid: pid, source_backend: source_backend} do + test "ingest/2 and execute_query/2 dispatched message", %{ + pid: pid, + source_backend: source_backend + } do log_event = build(:log_event, source: source_backend.source, From fbae6b37ea11089739d7c7209db41e422cbb9145 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:02:33 +0800 Subject: [PATCH 10/62] chore: revert application.ex changes --- lib/logflare/application.ex | 215 ++++++++++++++++++------------------ 1 file changed, 108 insertions(+), 107 deletions(-) diff --git a/lib/logflare/application.ex b/lib/logflare/application.ex index 584a1f85a..669a57fb0 100644 --- a/lib/logflare/application.ex +++ b/lib/logflare/application.ex @@ -24,107 +24,143 @@ defmodule Logflare.Application do children = get_children(env) + # See https://hexdocs.pm/elixir/Supervisor.html + # for other strategies and supported options opts = [strategy: :one_for_one, name: Logflare.Supervisor] Supervisor.start_link(children, opts) end - def config_change(changed, _new, removed) do - LogflareWeb.Endpoint.config_change(changed, removed) - :ok - end - - def start_phase(:seed_system, _, env: :test), do: :ok - - def start_phase(:seed_system, _, _) do - startup_tasks() - :ok + defp get_goth_child_spec() do + # Setup Goth for GCP connections + require Logger + credentials = Jason.decode!(Application.get_env(:goth, :json)) + scopes = ["https://www.googleapis.com/auth/cloud-platform"] + source = {:service_account, credentials, scopes: scopes} + {Goth, name: Logflare.Goth, source: source} end defp get_children(:test) do - 
cache_children() ++ - [ - Logflare.Repo, - Logs.RejectedLogEvents, - {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, - {Task.Supervisor, name: Logflare.TaskSupervisor} - ] ++ - v2_ingestion_pipeline_children() ++ - common_children() + [ + ContextCache, + Users.Cache, + Sources.Cache, + Billing.Cache, + SourceSchemas.Cache, + PubSubRates.Cache, + Logs.LogEvents.Cache, + Logs.RejectedLogEvents, + {Phoenix.PubSub, name: Logflare.PubSub}, + Logflare.Repo, + # get_goth_child_spec(), + LogflareWeb.Endpoint, + {Task.Supervisor, name: Logflare.TaskSupervisor}, + {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache}, + # v2 ingestion pipelines + {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.SourcesSup}, + {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.RecentLogsSup}, + {DynamicSupervisor, + strategy: :one_for_one, name: Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor}, + {Registry, name: Logflare.Backends.SourceRegistry, keys: :unique}, + {Registry, name: Logflare.Backends.SourceDispatcher, keys: :duplicate} + ] ++ common_children() end defp get_children(_) do # Database options for Postgres notifications - - topologies = Application.get_env(:libcluster, :topologies, []) - - cache_children() ++ - [ - Logflare.Repo, - {Task.Supervisor, name: Logflare.TaskSupervisor}, - {Cluster.Supervisor, [topologies, [name: Logflare.ClusterSupervisor]]}, - Logs.RejectedLogEvents, - Sources.Counters, - Sources.RateCounters, - {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, - PubSubRates.Rates, - PubSubRates.Buffers, - PubSubRates.Inserts, - Logflare.Source.Supervisor, - - # If we get a log event and the Source.Supervisor is not up it will 500 - # Monitor system level metrics - Logflare.SystemMetricsSup - ] ++ - get_goth_children() ++ - replication_log_children() ++ - v2_ingestion_pipeline_children() ++ - grpc_children() ++ - conditional_children() ++ - common_children() - end - - defp replication_log_children() do hostname = '#{Application.get_env(:logflare, Logflare.Repo)[:hostname]}' username = Application.get_env(:logflare, Logflare.Repo)[:username] password = Application.get_env(:logflare, Logflare.Repo)[:password] database = Application.get_env(:logflare, Logflare.Repo)[:database] + port = Application.get_env(:logflare, Logflare.Repo)[:port] slot = Application.get_env(:logflare, Logflare.CacheBuster)[:replication_slot] publications = Application.get_env(:logflare, Logflare.CacheBuster)[:publications] + topologies = Application.get_env(:libcluster, :topologies, []) + grpc_port = Application.get_env(:grpc, :port) + ssl = Application.get_env(:logflare, :ssl) + grpc_creds = if ssl, do: GRPC.Credential.new(ssl: ssl) - opts = [ - register: Logflare.PgPublisher, - epgsql: %{ - host: hostname, - port: port, - username: username, - database: database, - password: password - }, - slot: slot, - wal_position: {"0", "0"}, - publications: publications - ] - - [{Cainophile.Adapters.Postgres, opts}, Logflare.CacheBuster] - end - - defp v2_ingestion_pipeline_children() do [ + {Task.Supervisor, name: Logflare.TaskSupervisor}, + {Cluster.Supervisor, [topologies, [name: Logflare.ClusterSupervisor]]}, + get_goth_child_spec(), + Logflare.Repo, + {Phoenix.PubSub, name: Logflare.PubSub, pool_size: 10}, + # supervisor(LogflareTelemetry.Supervisor, []), + # Context Caches + ContextCache, + Users.Cache, + Sources.Cache, + Billing.Cache, + SourceSchemas.Cache, + PubSubRates.Cache, + Logs.LogEvents.Cache, + + # Follow Postgresql replication log and bust 
all our context caches + { + Cainophile.Adapters.Postgres, + register: Logflare.PgPublisher, + epgsql: %{ + host: hostname, + port: port, + username: username, + database: database, + password: password + }, + slot: slot, + wal_position: {"0", "0"}, + publications: publications + }, + Logflare.CacheBuster, + + # Sources + Logs.RejectedLogEvents, + # init Counters before Supervisor as Supervisor calls Counters through table create + Sources.Counters, + Sources.RateCounters, + PubSubRates.Rates, + PubSubRates.Buffers, + PubSubRates.Inserts, + Logflare.Source.Supervisor, + + # If we get a log event and the Source.Supervisor is not up it will 500 + LogflareWeb.Endpoint, + {GRPC.Server.Supervisor, {LogflareGrpc.Endpoint, grpc_port, cred: grpc_creds}}, + # Monitor system level metrics + Logflare.SystemMetricsSup, + + # For Logflare Endpoints + {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache}, + + # Startup tasks + {Task, fn -> startup_tasks() end}, + + # v2 ingestion pipelines {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.SourcesSup}, {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.RecentLogsSup}, {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor}, {Registry, name: Logflare.Backends.SourceRegistry, keys: :unique}, {Registry, name: Logflare.Backends.SourceDispatcher, keys: :duplicate} - ] ++ conditional_children() ++ common_children() end - defp conditional_children do + def conditional_children do config_cat_key = Application.get_env(:logflare, :config_cat_sdk_key) # only add in config cat to multi-tenant prod - if(config_cat_key, do: [{ConfigCat, [sdk_key: config_cat_key]}], else: []) + if config_cat_key do + [ + {ConfigCat, [sdk_key: config_cat_key]} + ] + else + [] + end + end + + def config_change(changed, _new, removed) do + LogflareWeb.Endpoint.config_change(changed, removed) + :ok end defp common_children do [ # Finch connection pools, using http2 {Finch, name: Logflare.FinchIngest, pools: %{:default => [protocol: :http2, count: 200]}}, {Finch, name: Logflare.FinchQuery, pools: %{:default => [protocol: :http2, count: 100]}}, - {Finch, name: Logflare.FinchDefault, pools: %{:default => [protocol: :http2, count: 50]}}, - LogflareWeb.Endpoint - ] - end - - defp get_goth_children() do - # Setup Goth for GCP connections - case Application.get_env(:logflare, :supabase_mode) do - true -> - [] - - false -> - credentials = Jason.decode!(Application.get_env(:goth, :json)) - scopes = ["https://www.googleapis.com/auth/cloud-platform"] - source = {:service_account, credentials, scopes: scopes} - [{Goth, name: Logflare.Goth, source: source}] - end - end - - defp cache_children() do - [ - ContextCache, - Users.Cache, - Sources.Cache, - Billing.Cache, - SourceSchemas.Cache, - PubSubRates.Cache, - Logs.LogEvents.Cache, - {DynamicSupervisor, strategy: :one_for_one, name: Logflare.Endpoints.Cache} - ] - end - - defp grpc_children() do - grpc_port = Application.get_env(:grpc, :port) - ssl = Application.get_env(:logflare, :ssl) - grpc_creds = if ssl, do: GRPC.Credential.new(ssl: ssl) - - [{GRPC.Server.Supervisor, {LogflareGrpc.Endpoint, grpc_port, cred: grpc_creds}}] - end - def startup_tasks do # if single tenant, insert enterprise user Logger.info("Executing startup tasks") @@ -189,6 +189,7 @@ defmodule Logflare.Application do # buffer time for
all sources to init and create tables # in case of latency. :timer.sleep(3_000) + SingleTenant.update_supabase_source_schemas() end end end From 1a87e948941d8ac5aa986441f7d2e3b3c5bc8428 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:03:11 +0800 Subject: [PATCH 11/62] chore: revert logs.ex --- lib/logflare/logs/logs.ex | 64 +++++++++++++++------------------------ 1 file changed, 25 insertions(+), 39 deletions(-) diff --git a/lib/logflare/logs/logs.ex b/lib/logflare/logs/logs.ex index 6c3c54a4a..d70345777 100644 --- a/lib/logflare/logs/logs.ex +++ b/lib/logflare/logs/logs.ex @@ -2,49 +2,41 @@ defmodule Logflare.Logs do @moduledoc false require Logger - alias Logflare.LogEvent - alias Logflare.Logs.IngestTransformers - alias Logflare.Logs.IngestTypecasting - alias Logflare.Logs.RejectedLogEvents + alias Logflare.LogEvent, as: LE + alias Logflare.Logs.{RejectedLogEvents} + alias Logflare.{SystemMetrics, Source, Sources} + alias Logflare.Source.{BigQuery.BufferCounter, RecentLogsServer} alias Logflare.Logs.SourceRouting - alias Logflare.Rule - alias Logflare.Source - alias Logflare.Source.BigQuery.BufferCounter - alias Logflare.Source.RecentLogsServer + alias Logflare.Logs.IngestTypecasting + alias Logflare.Logs.IngestTransformers alias Logflare.Source.Supervisor - alias Logflare.Sources - alias Logflare.SystemMetrics + alias Logflare.Rule @spec ingest_logs(list(map), Source.t()) :: :ok | {:error, term} - def ingest_logs(log_params_batch, %Source{rules: rules} = source) - when is_list(rules) do + def ingest_logs(log_params_batch, %Source{rules: rules} = source) when is_list(rules) do log_params_batch |> Enum.map(fn log -> log |> IngestTypecasting.maybe_apply_transform_directives() |> IngestTransformers.transform(:to_bigquery_column_spec) - |> LE.make(%{source: source}) + |> LE.make(%{source: source}) |> maybe_mark_le_dropped_by_lql() |> maybe_ingest_and_broadcast() end) |> Enum.reduce([], fn - %{valid: true}, acc -> acc - le, acc -> [le.validation_error | acc] + |> Enum.reduce([], fn le, acc -> + if le.valid do + acc + else + [le.validation_error | acc] + end end) - |> then(fn + |> case do [] -> :ok errors when is_list(errors) -> {:error, errors} - end) - end - - def ingest(%LogEvent{source: %Source{} = source} = le) do - case Logflare.SingleTenant.supabase_mode?() do - true -> ingest(:backends, source, le) - false -> ingest(:bigquery, source, le) end end - defp ingest(:bigquery, source, le) do + def ingest(%LE{source: %Source{} = source} = le) do # individual source genservers Supervisor.ensure_started(source.token)

RecentLogsServer.push(le) BufferCounter.push(le)

Sources.Counters.increment(source.token) SystemMetrics.AllLogsLogged.increment(:total_logs_logged) - :ok - end - defp ingest(:backends, source, le) do - Logflare.Backends.ingest_logs([le], source) + :ok end - def broadcast(%LogEvent{} = le) do + def broadcast(%LE{} = le) do if le.source.metrics.avg < 5 do Source.ChannelTopics.broadcast_new(le) end end - def maybe_mark_le_dropped_by_lql(%LogEvent{source: %{drop_lql_string: drop_lql_string}} = le) + def maybe_mark_le_dropped_by_lql(%LE{source: %{drop_lql_string: drop_lql_string}} = le) when is_nil(drop_lql_string) do le end def maybe_mark_le_dropped_by_lql( - %LogEvent{ - body: _body, - source: %{drop_lql_string: drop_lql_string, drop_lql_filters: filters} - } = le + %LE{body: _body, source: %{drop_lql_string: drop_lql_string, drop_lql_filters: filters}} = + le ) when is_binary(drop_lql_string) do cond do - length(filters) >= 1 && -
SourceRouting.route_with_lql_rules?(le, %Rule{lql_filters: filters}) -> + length(filters) >= 1 && SourceRouting.route_with_lql_rules?(le, %Rule{lql_filters: filters}) -> Map.put(le, :drop, true) true -> @@ -92,7 +78,7 @@ defmodule Logflare.Logs do end end - defp maybe_ingest_and_broadcast(%LogEvent{} = le) do + defp maybe_ingest_and_broadcast(%LE{} = le) do cond do le.drop -> le @@ -100,7 +86,7 @@ defmodule Logflare.Logs do le.valid -> le |> tap(&SourceRouting.route_to_sinks_and_ingest/1) - |> LogEvent.apply_custom_event_message() + |> LE.apply_custom_event_message() |> tap(&ingest/1) # use module reference namespace for Mimic mocking |> tap(&__MODULE__.broadcast/1) From 08d6d328551d734fe27931cd9ac3c0ebe71eaf04 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:04:08 +0800 Subject: [PATCH 12/62] chore: revert gen_utils.ex --- lib/logflare/google/bigquery/gen_utils/gen_utils.ex | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lib/logflare/google/bigquery/gen_utils/gen_utils.ex b/lib/logflare/google/bigquery/gen_utils/gen_utils.ex index 70a97aa97..d3479e99e 100644 --- a/lib/logflare/google/bigquery/gen_utils/gen_utils.ex +++ b/lib/logflare/google/bigquery/gen_utils/gen_utils.ex @@ -72,9 +72,8 @@ defmodule Logflare.Google.BigQuery.GenUtils do @typep conn_type :: :ingest | :query | :default @spec get_conn(conn_type()) :: Tesla.Env.client() def get_conn(conn_type \\ :default) do - Logflare.Goth - |> Goth.fetch() - |> then(fn + Goth.fetch(Logflare.Goth) + |> case do {:ok, %Goth.Token{} = goth} -> Connection.new(goth.token) @@ -82,7 +81,7 @@ defmodule Logflare.Google.BigQuery.GenUtils do Logger.error("Goth error!", error_string: inspect(reason)) # This is going to give us an unauthorized connection but we are handling it downstream. 
Connection.new("") - end) + end # dynamically set tesla adapter |> Map.update!(:adapter, fn _value -> build_tesla_adapter_call(conn_type) end) end From 4e518030bf5eec2c5c7d044991c5cf251e487659 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:05:58 +0800 Subject: [PATCH 13/62] chore: revert backends.ex --- lib/logflare/backends.ex | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/logflare/backends.ex b/lib/logflare/backends.ex index 468748f19..3cfa48fe2 100644 --- a/lib/logflare/backends.ex +++ b/lib/logflare/backends.ex @@ -1,7 +1,7 @@ defmodule Logflare.Backends do @moduledoc false - alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.Adaptor.WebhookAdaptor + alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.RecentLogs alias Logflare.Backends.RecentLogsSup alias Logflare.Backends.SourceBackend @@ -184,8 +184,8 @@ defmodule Logflare.Backends do @spec start_source_sup(Source.t()) :: :ok | {:error, :already_started} def start_source_sup(%Source{} = source) do case DynamicSupervisor.start_child(SourcesSup, {SourceSup, source}) do - {:ok, pid} -> {:ok, pid} - {:error, {:already_started, _pid}} -> {:error, :already_started} + {:ok, _pid} -> :ok + {:error, {:already_started = reason, _pid}} -> {:error, reason} end end @@ -209,7 +209,7 @@ defmodule Logflare.Backends do :ok | {:error, :already_started} | {:error, :not_started} def restart_source_sup(%Source{} = source) do with :ok <- stop_source_sup(source), - {:ok, _} <- start_source_sup(source) do + :ok <- start_source_sup(source) do :ok end end From 9ec270263f1b0547fc70abfaaa374d97746588ef Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:10:56 +0800 Subject: [PATCH 14/62] chore: revert source_channel.ex --- lib/logflare_web/channels/source_channel.ex | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/logflare_web/channels/source_channel.ex b/lib/logflare_web/channels/source_channel.ex index 0a3003f82..2350fb6d4 100644 --- a/lib/logflare_web/channels/source_channel.ex +++ b/lib/logflare_web/channels/source_channel.ex @@ -22,8 +22,7 @@ defmodule LogflareWeb.SourceChannel do true socket.assigns[:user] -> - socket.assigns[:user].sources - |> Enum.map(& &1.token) + Enum.map(socket.assigns[:user].sources, & &1.token) |> Enum.member?(String.to_existing_atom(source_token)) true -> From 371f26af973ccfe404b0a3b551c42cd7d4ec45b5 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:13:27 +0800 Subject: [PATCH 15/62] chore: revert sources.ex --- lib/logflare/sources.ex | 41 ++++++++++++++++++----------------------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/lib/logflare/sources.ex b/lib/logflare/sources.ex index b4f1011f4..e662f6353 100644 --- a/lib/logflare/sources.ex +++ b/lib/logflare/sources.ex @@ -5,7 +5,6 @@ defmodule Logflare.Sources do import Ecto.Query, only: [from: 2] - alias Logflare.SingleTenant alias Logflare.Cluster alias Logflare.Google.BigQuery.GenUtils alias Logflare.Google.BigQuery.SchemaUtils @@ -41,33 +40,29 @@ defmodule Logflare.Sources do @spec create_source(map(), User.t()) :: {:ok, Source.t()} | {:error, Ecto.Changeset.t()} def create_source(source_params, user) do - user - |> Ecto.build_assoc(:sources) - |> Source.update_by_user_changeset(source_params) - |> Repo.insert() - |> initialize_source(SingleTenant.supabase_mode?()) - end + source = + user + |> Ecto.build_assoc(:sources) + |> Source.update_by_user_changeset(source_params) + |> Repo.insert() - defp 
initialize_source({:ok, source}, true) do - {:ok, source} - end + case source do + {:ok, source} -> + init_schema = SchemaBuilder.initial_table_schema() - defp initialize_source({:ok, source}, false) do - init_schema = SchemaBuilder.initial_table_schema() + {:ok, _source_schema} = + SourceSchemas.create_source_schema(source, %{ + bigquery_schema: init_schema, + schema_flat_map: SchemaUtils.bq_schema_to_flat_typemap(init_schema) + }) - {:ok, _source_schema} = - SourceSchemas.create_source_schema(source, %{ - bigquery_schema: init_schema, - schema_flat_map: SchemaUtils.bq_schema_to_flat_typemap(init_schema) - }) + Source.Supervisor.start_source(source.token) - Source.Supervisor.start_source(source.token) + {:ok, source} - {:ok, source} - end - - defp initialize_source({:error, changeset}, _) do - {:error, changeset} + {:error, changeset} -> + {:error, changeset} + end end @doc """ From c5acd6227c463fd50577d3bc5752d4bc73a05c32 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:21:43 +0800 Subject: [PATCH 16/62] chore: formatting --- config/runtime.exs | 1 - 1 file changed, 1 deletion(-) diff --git a/config/runtime.exs b/config/runtime.exs index 055368699..043ae5180 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -187,7 +187,6 @@ config :stripity_stripe, if config_env() != :test do config :goth, json: File.read!("gcloud.json") - config :grpc, port: System.get_env("LOGFLARE_GRPC_PORT", "50051") |> String.to_integer() end From c0eacb4cd1b0dbda500d747feabec5d80c473684 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:22:19 +0800 Subject: [PATCH 17/62] chore: revert single_tenant.ex --- lib/logflare/single_tenant.ex | 120 ++++++++++++++++++++++++---------- 1 file changed, 86 insertions(+), 34 deletions(-) diff --git a/lib/logflare/single_tenant.ex b/lib/logflare/single_tenant.ex index fb2037236..cec0890c1 100644 --- a/lib/logflare/single_tenant.ex +++ b/lib/logflare/single_tenant.ex @@ -2,16 +2,17 @@ defmodule Logflare.SingleTenant do @moduledoc """ Handles single tenant-related logic """ - alias Logflare.Backends + alias Logflare.Users alias Logflare.Billing alias Logflare.Billing.Plan - alias Logflare.Endpoints alias Logflare.Endpoints.Query alias Logflare.Source - alias Logflare.Repo alias Logflare.Sources - alias Logflare.Users - + alias Logflare.Endpoints + alias Logflare.Repo + alias Logflare.Source.Supervisor + alias Logflare.Source.BigQuery.Schema + alias Logflare.LogEvent require Logger @user_attrs %{ @@ -23,7 +24,6 @@ defmodule Logflare.SingleTenant do provider_uid: "default", endpoints_beta: true } - @plan_attrs %{ name: "Enterprise", period: "year", @@ -50,16 +50,7 @@ defmodule Logflare.SingleTenant do "postgREST.logs.prod", "pgbouncer.logs.prod" ] - @endpoint_params [ - %{ - name: "test", - query: "select body from 'cloudflare.logs.prod'", - sandboxable: true, - max_limit: 1000, - enable_auth: true, - cache_duration_seconds: 0 - }, %{ name: "logs.all", query: @@ -105,7 +96,9 @@ defmodule Logflare.SingleTenant do """ def get_default_plan do Billing.list_plans() - |> Enum.find(fn plan -> @plan_attrs = plan end) + |> Enum.find(fn plan -> + @plan_attrs = plan + end) end @doc """ @@ -128,11 +121,14 @@ defmodule Logflare.SingleTenant do """ @spec create_default_plan() :: {:ok, Plan.t()} | {:error, :already_created} def create_default_plan do - plan = Billing.list_plans() |> Enum.find(fn plan -> plan.name == "Enterprise" end) + plan = + Billing.list_plans() + |> Enum.find(fn plan -> plan.name == "Enterprise" end) - case plan do - nil -> 
Billing.create_plan(@plan_attrs) - _ -> {:error, :already_created} + if plan == nil do + Billing.create_plan(@plan_attrs) + else + {:error, :already_created} end end @@ -148,10 +144,7 @@ defmodule Logflare.SingleTenant do sources = for name <- @source_names do # creating a source will automatically start the source's RLS process - url = Application.get_env(:logflare, :postgres_backend_url) - {:ok, source} = Sources.create_source(%{name: name, v2_pipeline: true}, user) - {:ok, _} = Backends.create_source_backend(source, :postgres, %{url: url}) - + {:ok, source} = Sources.create_source(%{name: name}, user) source end @@ -166,16 +159,17 @@ Note: not tested as `Logflare.Source.Supervisor` is a pain to mock. TODO: add testing for v2 """ - @spec ensure_supabase_sources_started() :: list() + @spec ensure_supabase_sources_started() :: :ok def ensure_supabase_sources_started do user = get_default_user() if user do for source <- Sources.list_sources_by_user(user) do - source = Repo.preload(source, :source_backends) - Logflare.Backends.start_source_sup(source) + Supervisor.ensure_started(source.token) end end + + :ok end @doc """ @@ -208,6 +202,33 @@ defmodule Logflare.SingleTenant do @spec supabase_mode? :: boolean() def supabase_mode?, do: !!Application.get_env(:logflare, :supabase_mode) and single_tenant?() + @doc """ + Adds ingestion samples for supabase sources, so that schema is built and stored correctly. + """ + @spec update_supabase_source_schemas :: nil + def update_supabase_source_schemas do + if supabase_mode?() do + user = get_default_user() + + sources = + Sources.list_sources_by_user(user) + |> Repo.preload(:rules) + + tasks = + for source <- sources do + Task.async(fn -> + source = Sources.refresh_source_metrics_for_ingest(source) + Logger.debug("Updating schemas for #{source.name}") + event = read_ingest_sample_json(source.name) + log_event = LogEvent.make(event, %{source: source}) + Schema.update(source.token, log_event) + end) + end + + Task.await_many(tasks) + end + end + @doc """ Returns the status of supabase mode setup process. Possible statuses: :ok, nil @@ -220,18 +241,49 @@ ... seed_plan = if default_plan, do: :ok seed_sources = - default_user && - Sources.list_sources_by_user(default_user) - |> Enum.map(&Backends.source_sup_started?/1) - |> Enum.count(& &1) + if default_user do + if Sources.list_sources_by_user(default_user) |> length() > 0, do: :ok + end - seed_endpoints = default_user && Endpoints.list_endpoints_by(user_id: default_user.id) + seed_endpoints = + if default_user do + if Endpoints.list_endpoints_by(user_id: default_user.id) |> length() > 0, do: :ok + end + + source_schemas_updated = if supabase_mode_source_schemas_updated?(), do: :ok %{ seed_user: seed_user, seed_plan: seed_plan, - seed_sources: if(seed_sources > 0, do: :ok), - seed_endpoints: if(seed_endpoints > 0, do: :ok) + seed_sources: seed_sources, + seed_endpoints: seed_endpoints, + source_schemas_updated: source_schemas_updated } end + + def supabase_mode_source_schemas_updated?
do + user = get_default_user() + + if user do + sources = Sources.list_sources_by_user(user) + + checks = + for source <- sources, + source.name in @source_names, + state = Schema.get_state(source.token) do + state.field_count > 3 + end + + Enum.all?(checks) and length(sources) > 0 + else + false + end + end + + # Read a source ingest sample json file + defp read_ingest_sample_json(source_name) do + Application.app_dir(:logflare, "priv/supabase/ingest_samples/#{source_name}.json") + |> File.read!() + |> Jason.decode!() + end end From d6bbfd879f55518ca27e905cafeb83ebdf06ce48 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:22:55 +0800 Subject: [PATCH 18/62] chore: revert supervisor.ex --- lib/logflare/source/supervisor.ex | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/lib/logflare/source/supervisor.ex b/lib/logflare/source/supervisor.ex index 74c83400c..ca4047cc0 100644 --- a/lib/logflare/source/supervisor.ex +++ b/lib/logflare/source/supervisor.ex @@ -179,25 +179,24 @@ defmodule Logflare.Source.Supervisor do defp create_source(source_id) do # Double check source is in the database before starting # Can be removed when manager fns move into their own genserver - if !Logflare.SingleTenant.supabase_mode?() do - source = Sources.get_by(token: source_id) - if source do - rls = %RLS{source_id: source_id, source: source} + source = Sources.get_by(token: source_id) - children = [ - Supervisor.child_spec({RLS, rls}, id: source_id, restart: :transient) - ] + if source do + rls = %RLS{source_id: source_id, source: source} - # fire off async init in async task, so that bq call does not block. - Tasks.start_child(fn -> init_table(source_id) end) + children = [ + Supervisor.child_spec({RLS, rls}, id: source_id, restart: :transient) + ] - Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 10, max_seconds: 60) - else - {:error, :not_found_in_db} - end + # fire off async init in async task, so that bq call does not block. 
+ Tasks.start_child(fn -> + init_table(source_id) + end) + + Supervisor.start_link(children, strategy: :one_for_one, max_restarts: 10, max_seconds: 60) else - {:error, :supabase_mode} + {:error, :not_found_in_db} end end From 186c4cb682ec663c46e306eee6b86a3f500d41de Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:33:12 +0800 Subject: [PATCH 19/62] chore: revert mix.exs --- mix.exs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mix.exs b/mix.exs index 43d778ed5..c7a1f8b1e 100644 --- a/mix.exs +++ b/mix.exs @@ -51,8 +51,7 @@ defmodule Logflare.Mixfile do :crypto, :os_mon ], - included_applications: [:mnesia], - start_phases: [seed_system: [env: Mix.env()]] + included_applications: [:mnesia] ] end From 64d6675eea4296db3e92409f5fdf45f7c9825c1a Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:33:50 +0800 Subject: [PATCH 20/62] chore: revert source_backends_live.ex --- lib/logflare_web/live/source_backends_live.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/logflare_web/live/source_backends_live.ex b/lib/logflare_web/live/source_backends_live.ex index 0bbd753e8..888246e83 100644 --- a/lib/logflare_web/live/source_backends_live.ex +++ b/lib/logflare_web/live/source_backends_live.ex @@ -86,7 +86,7 @@ defmodule LogflareWeb.SourceBackendsLive do %{assigns: %{source: source}} = socket socket = - case Backends.create_source_backend(source, params["type"], params) do + case Logflare.Backends.create_source_backend(source, params["type"], params) do {:ok, _} -> assign(socket, :show_create_form, false) From 9b42aafa952a035b965a80bc02c055a9d22f26f2 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:34:47 +0800 Subject: [PATCH 21/62] chore: revert backends_test.exs --- test/logflare/backends_test.exs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/logflare/backends_test.exs b/test/logflare/backends_test.exs index 54d66a59d..7c37324c7 100644 --- a/test/logflare/backends_test.exs +++ b/test/logflare/backends_test.exs @@ -91,14 +91,14 @@ defmodule Logflare.BackendsTest do end test "start_source_sup/1, stop_source_sup/1, restart_source_sup/1", %{source: source} do - assert {:ok, _} = Backends.start_source_sup(source) + assert :ok = Backends.start_source_sup(source) assert {:error, :already_started} = Backends.start_source_sup(source) assert :ok = Backends.stop_source_sup(source) assert {:error, :not_started} = Backends.stop_source_sup(source) assert {:error, :not_started} = Backends.restart_source_sup(source) - assert {:ok, _} = Backends.start_source_sup(source) + assert :ok = Backends.start_source_sup(source) assert :ok = Backends.restart_source_sup(source) end end From fc4722bc621b63e94a827cf6a67def551996965f Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:35:30 +0800 Subject: [PATCH 22/62] chore: revert logs_test.exs --- test/logflare/logs/logs_test.exs | 97 ++++++++++++++++---------------- 1 file changed, 48 insertions(+), 49 deletions(-) diff --git a/test/logflare/logs/logs_test.exs b/test/logflare/logs/logs_test.exs index 53087e1ee..73c648ec8 100644 --- a/test/logflare/logs/logs_test.exs +++ b/test/logflare/logs/logs_test.exs @@ -1,11 +1,9 @@ defmodule Logflare.LogsTest do @moduledoc false use Logflare.DataCase - - alias Logflare.Backends - alias Logflare.LogEvent alias Logflare.Logs alias Logflare.Lql + # v1 pipeline alias Logflare.Source.RecentLogsServer alias Logflare.Sources.Counters alias Logflare.Sources.RateCounters @@ -34,7 +32,8 @@ defmodule 
Logflare.LogsTest do setup do # mock goth behaviour - stub(Goth, :fetch, fn _mod -> {:ok, %Goth.Token{token: "auth-token"}} end) + Goth + |> stub(:fetch, fn _mod -> {:ok, %Goth.Token{token: "auth-token"}} end) :ok end @@ -43,37 +42,46 @@ defmodule Logflare.LogsTest do describe "ingest input" do test "empty list", %{source: source} do - Mimic.reject(Logs, :broadcast, 1) + Logs + |> Mimic.reject(:broadcast, 1) assert :ok = Logs.ingest_logs([], source) end test "message key gets converted to event_message", %{source: source} do - expect(Logs, :broadcast, 1, fn le -> + Logs + |> expect(:broadcast, 1, fn le -> assert %{"event_message" => "testing 123"} = le.body assert Map.keys(le.body) |> length() == 3 le end) - batch = [%{"message" => "testing 123"}] + batch = [ + %{"message" => "testing 123"} + ] assert :ok = Logs.ingest_logs(batch, source) end test "top level keys", %{source: source} do - batch = [%{"event_message" => "testing 123", "other" => 123}] + batch = [ + %{"event_message" => "testing 123", "other" => 123} + ] assert :ok = Logs.ingest_logs(batch, source) end test "non-map value for metadata key", %{source: source} do - expect(Logs, :broadcast, 1, fn le -> + Logs + |> expect(:broadcast, 1, fn le -> assert %{"metadata" => "some_value"} = le.body le end) - batch = [%{"event_message" => "any", "metadata" => "some_value"}] + batch = [ + %{"event_message" => "any", "metadata" => "some_value"} + ] assert :ok = Logs.ingest_logs(batch, source) end @@ -81,11 +89,12 @@ defmodule Logflare.LogsTest do describe "full ingestion pipeline test" do test "additive schema update from log event", %{source: source} do - expect(GoogleApi.BigQuery.V2.Api.Tabledata, :bigquery_tabledata_insert_all, fn conn, - _project_id, - _dataset_id, - _table_name, - opts -> + GoogleApi.BigQuery.V2.Api.Tabledata + |> expect(:bigquery_tabledata_insert_all, fn conn, + _project_id, + _dataset_id, + _table_name, + opts -> assert {Tesla.Adapter.Finch, :call, [[name: Logflare.FinchIngest, receive_timeout: _]]} = conn.adapter @@ -94,11 +103,12 @@ defmodule Logflare.LogsTest do {:ok, %GoogleApi.BigQuery.V2.Model.TableDataInsertAllResponse{insertErrors: nil}} end) - expect(GoogleApi.BigQuery.V2.Api.Tables, :bigquery_tables_patch, fn conn, - _project_id, - _dataset_id, - _table_name, - [body: body] -> + GoogleApi.BigQuery.V2.Api.Tables + |> expect(:bigquery_tables_patch, fn conn, + _project_id, + _dataset_id, + _table_name, + [body: body] -> # use default config adapter assert conn.adapter == nil schema = body.schema @@ -106,9 +116,12 @@ defmodule Logflare.LogsTest do {:ok, %{}} end) - expect(Logflare.Mailer, :deliver, fn _ -> :ok end) + Logflare.Mailer + |> expect(:deliver, fn _ -> :ok end) - batch = [%{"event_message" => "testing 123", "key" => "value"}] + batch = [ + %{"event_message" => "testing 123", "key" => "value"} + ] assert :ok = Logs.ingest_logs(batch, source) :timer.sleep(1_500) @@ -122,15 +135,19 @@ defmodule Logflare.LogsTest do drop_test = insert(:source, user: user, drop_lql_string: "testing", drop_lql_filters: lql_filters) - Mimic.reject(Logs, :broadcast, 1) + Logs + |> Mimic.reject(:broadcast, 1) - batch = [%{"event_message" => "testing 123"}] + batch = [ + %{"event_message" => "testing 123"} + ] assert :ok = Logs.ingest_logs(batch, drop_test) end test "no rules", %{source: source} do - expect(Logs, :broadcast, 2, fn le -> le end) + Logs + |> expect(:broadcast, 2, fn le -> le end) batch = [ %{"event_message" => "routed"}, @@ -144,7 +161,8 @@ defmodule Logflare.LogsTest do insert(:rule, lql_string: "testing", sink: 
target.token, source_id: source.id) source = source |> Repo.preload(:rules, force: true) - expect(Logs, :broadcast, 3, fn le -> le end) + Logs + |> expect(:broadcast, 3, fn le -> le end) batch = [ %{"event_message" => "not routed"}, @@ -158,7 +176,8 @@ defmodule Logflare.LogsTest do insert(:rule, regex: "routed123", sink: target.token, source_id: source.id) source = source |> Repo.preload(:rules, force: true) - expect(Logs, :broadcast, 3, fn le -> le end) + Logs + |> expect(:broadcast, 3, fn le -> le end) batch = [ %{"event_message" => "not routed"}, @@ -174,30 +193,10 @@ defmodule Logflare.LogsTest do insert(:rule, lql_string: "testing", sink: other_target.token, source_id: target.id) source = source |> Repo.preload(:rules, force: true) - expect(Logs, :broadcast, 2, fn le -> le end) + Logs + |> expect(:broadcast, 2, fn le -> le end) assert :ok = Logs.ingest_logs([%{"event_message" => "testing 123"}], source) end end - - describe "ingest for supabase_mode" do - TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) - - setup do - user = insert(:user) - source = insert(:source, user: user) - {:ok, pid} = Backends.start_source_sup(source) - - on_exit(fn -> - Process.exit(pid, :normal) - end) - - %{source: source} - end - - test "ingest logs into backends", %{source: source} do - le = LogEvent.make(%{body: %{"event_message" => "testing 123"}}, %{source: source}) - :ok = Logs.ingest(le) - end - end end From 91203a29c2dbcad9410905d3aaea9098785edf14 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:35:56 +0800 Subject: [PATCH 23/62] chore: revert single_tenant_test.exs --- test/logflare/single_tenant_test.exs | 55 +++++++++++----------------- 1 file changed, 22 insertions(+), 33 deletions(-) diff --git a/test/logflare/single_tenant_test.exs b/test/logflare/single_tenant_test.exs index 8cc16856e..4e5d69a64 100644 --- a/test/logflare/single_tenant_test.exs +++ b/test/logflare/single_tenant_test.exs @@ -1,14 +1,14 @@ defmodule Logflare.SingleTenantTest do - use Logflare.DataCase, async: false - + @moduledoc false + use Logflare.DataCase + alias Logflare.SingleTenant alias Logflare.Billing + alias Logflare.Users + alias Logflare.User alias Logflare.Billing.Plan - alias Logflare.Endpoints - alias Logflare.Repo - alias Logflare.SingleTenant alias Logflare.Sources - alias Logflare.User - alias Logflare.Users + alias Logflare.Endpoints + alias Logflare.Source.BigQuery.Schema describe "single tenant mode" do TestUtils.setup_single_tenant() @@ -62,38 +62,25 @@ defmodule Logflare.SingleTenantTest do TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) setup do - %{username: username, password: password, database: database, hostname: hostname} = - Application.get_env(:logflare, Logflare.Repo) |> Map.new() - - url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - previous_url = Application.get_env(:logflare, :postgres_backend_url) - Application.put_env(:logflare, :postgres_backend_url, url) - - on_exit(fn -> - Application.put_env(:logflare, :postgres_backend_url, previous_url) - end) - - %{url: url} + stub(Schema, :update, fn _token, _le -> :ok end) + :ok end - test "create_supabase_sources/0, create_supabase_endpoints/0", %{url: url} do - assert {:ok, sources} = SingleTenant.create_supabase_sources() + test "create_supabase_sources/0, create_supabase_endpoints/0" do + assert {:ok, [_ | _]} = SingleTenant.create_supabase_sources() assert {:error, :already_created} = SingleTenant.create_supabase_sources() - assert [url] == - sources - |> 
Enum.map(&Repo.preload(&1, :source_backends)) - |> Enum.map(fn %{source_backends: [%{config: %{"url" => url}}]} -> url end) - |> Enum.uniq() - # must have sources created first assert {:ok, [_ | _]} = SingleTenant.create_supabase_endpoints() assert {:error, :already_created} = SingleTenant.create_supabase_endpoints() end test "startup tasks inserts log sources/endpoints" do + expect(Schema, :update, 9, fn _source_token, _log_event -> :ok end) + SingleTenant.create_supabase_sources() SingleTenant.create_supabase_endpoints() + SingleTenant.update_supabase_source_schemas() user = SingleTenant.get_default_user() sources = Sources.list_sources_by_user(user) @@ -102,20 +89,22 @@ defmodule Logflare.SingleTenantTest do end test "supabase_mode_status/0" do + stub(Schema, :get_state, fn _ -> %{field_count: 3} end) SingleTenant.create_supabase_sources() - SingleTenant.create_supabase_endpoints() - started = SingleTenant.ensure_supabase_sources_started() |> Enum.map(&elem(&1, 1)) assert %{ seed_user: :ok, seed_plan: :ok, seed_sources: :ok, - seed_endpoints: :ok + seed_endpoints: nil, + source_schemas_updated: nil } = SingleTenant.supabase_mode_status() - on_exit(fn -> - Enum.each(started, &Process.exit(&1, :normal)) - end) + stub(Schema, :get_state, fn _ -> %{field_count: 5} end) + + assert %{ + source_schemas_updated: :ok + } = SingleTenant.supabase_mode_status() end end end From 83beaa17404454437ad2e286627dcfe4247d08e2 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:37:10 +0800 Subject: [PATCH 24/62] chore: revert health_check_controller_test --- .../health_check_controller_test.exs | 20 +++---------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/test/logflare_web/controllers/health_check_controller_test.exs b/test/logflare_web/controllers/health_check_controller_test.exs index 31ad66f0b..663f5b40e 100644 --- a/test/logflare_web/controllers/health_check_controller_test.exs +++ b/test/logflare_web/controllers/health_check_controller_test.exs @@ -3,6 +3,7 @@ defmodule LogflareWeb.HealthCheckControllerTest do For node-level health check only. 
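  In supabase mode, readiness additionally requires the seeded source schemas
  to report more than 3 fields, so these tests stub the schema state. A
  minimal sketch of that stub (the field count is illustrative):

      stub(Schema, :get_state, fn _ -> %{field_count: 5} end)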
""" use LogflareWeb.ConnCase + alias Logflare.Source.BigQuery.Schema alias Logflare.SingleTenant test "normal node health check", %{conn: conn} do @@ -25,29 +26,14 @@ defmodule LogflareWeb.HealthCheckControllerTest do TestUtils.setup_single_tenant(seed_user: true, supabase_mode: true) setup do - %{username: username, password: password, database: database, hostname: hostname} = - Application.get_env(:logflare, Logflare.Repo) |> Map.new() - - url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - previous_url = Application.get_env(:logflare, :postgres_backend_url) - Application.put_env(:logflare, :postgres_backend_url, url) - - on_exit(fn -> - Application.put_env(:logflare, :postgres_backend_url, previous_url) - end) - - %{url: url} + stub(Schema, :get_state, fn _ -> %{field_count: 5} end) + :ok end test "ok", %{conn: conn} do SingleTenant.create_supabase_sources() SingleTenant.create_supabase_endpoints() - started = SingleTenant.ensure_supabase_sources_started() |> Enum.map(&elem(&1, 1)) assert %{"status" => "ok"} = conn |> get("/health") |> json_response(200) - - on_exit(fn -> - Enum.each(started, &Process.exit(&1, :normal)) - end) end end end From f42db2af886060493e4cd18183ff50e0321789de Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:37:43 +0800 Subject: [PATCH 25/62] chore: revert test_utils.ex --- test/support/test_utils.ex | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/support/test_utils.ex b/test/support/test_utils.ex index cc27388de..174e84861 100644 --- a/test/support/test_utils.ex +++ b/test/support/test_utils.ex @@ -28,6 +28,16 @@ defmodule Logflare.TestUtils do quote do setup do + # conditionally update bigquery project id + initial_google_config = Application.get_env(:logflare, Logflare.Google) + replacement_project_id = unquote(opts.bigquery_project_id) + updated = Keyword.put(initial_google_config, :project_id, replacement_project_id) + Application.put_env(:logflare, Logflare.Google, updated) + + on_exit(fn -> + Application.put_env(:logflare, Logflare.Google, initial_google_config) + end) + # perform application env adjustments at runtime initial_single_tenant = Application.get_env(:logflare, :single_tenant) Application.put_env(:logflare, :single_tenant, true) From 8acdebd406ff627074a570841bda1d6d9db020bd Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:41:53 +0800 Subject: [PATCH 26/62] chore: fix compilation warnings --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 475728171..7f23dc305 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -18,8 +18,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do use GenServer use TypedStruct use Logflare.Backends.Adaptor - alias Logflare.Backends.Adaptor - @behaviour Logflare.Backends.Adaptor alias Logflare.Backends alias Logflare.Backends.SourceBackend @@ -62,26 +60,21 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end end - @impl Logflare.Backends.Adaptor def ingest(pid, log_events), do: GenServer.call(pid, {:ingest, log_events}) - @impl Adaptor def cast_config(params) do {%{}, %{url: :string}} |> Ecto.Changeset.cast(params, [:url]) end - @impl Adaptor def validate_config(changeset) do changeset |> Ecto.Changeset.validate_required([:url]) |> Ecto.Changeset.validate_format(:url, 
~r/postgresql?\:\/\/.+/) end - @impl Adaptor def queryable?(), do: true - @impl Adaptor def execute_query(pid, query) do GenServer.call(pid, {:execute_query, query}) end @@ -132,7 +125,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end @impl true - def handle_call({:execute_query, %Ecto.Query{select: select} = query}, _from, state) do + def handle_call({:execute_query, %Ecto.Query{} = query}, _from, state) do mod = state.repository_module result = mod.all(query) {:reply, result, state} From b345b8a67080b2697f1d993a8c53e65b171a654e Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:42:49 +0800 Subject: [PATCH 27/62] chore: add back @impl to callbacks --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 7f23dc305..1f1741f3b 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -60,21 +60,26 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end end + @impl true def ingest(pid, log_events), do: GenServer.call(pid, {:ingest, log_events}) + @impl true def cast_config(params) do {%{}, %{url: :string}} |> Ecto.Changeset.cast(params, [:url]) end + @impl true def validate_config(changeset) do changeset |> Ecto.Changeset.validate_required([:url]) |> Ecto.Changeset.validate_format(:url, ~r/postgresql?\:\/\/.+/) end + @impl true def queryable?(), do: true + @impl true def execute_query(pid, query) do GenServer.call(pid, {:execute_query, query}) end @@ -106,7 +111,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do migrations_table = migrations_table_name(source_backend) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migrations_table}") :ok - # GenServer.call(pid, :drop_migrations_table) end @doc """ From cbcfcab57c0749b743686ed4c9a138b7843b5c0a Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:43:17 +0800 Subject: [PATCH 28/62] chore: remove commented out code --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 1f1741f3b..2a156e5c1 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -99,7 +99,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do ) :ok - # GenServer.call(pid, :rollback_migrations) end @doc """ From 89fc297e2fe16b4d5d64f02d115e7ef472fd4787 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 21:52:43 +0800 Subject: [PATCH 29/62] chore: renamed functions, condensed tests --- .../backends/adaptor/postgres_adaptor.ex | 25 ++-- .../backends/adaptor/postgres_adaptor/repo.ex | 38 ++++-- lib/logflare/endpoints.ex | 4 +- .../adaptor/postgres_adaptor/repo_test.exs | 103 ---------------- .../backends/postgres_adaptor_test.exs | 115 ++++++++++++------ test/logflare/endpoints_test.exs | 6 +- 6 files changed, 130 insertions(+), 161 deletions(-) delete mode 100644 test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 2a156e5c1..e4f996038 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -8,9 +8,9 @@ defmodule 
Logflare.Backends.Adaptor.PostgresAdaptor do ### On Source Backend creation: * Broadway pipeline for ingestion: Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline * MemoryBuffer for buffering log events: Logflare.Buffers.MemoryBuffer - * Dynamically created Ecto.Repo created for configured PSQL URL: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.new_repository_for_source_backend - * Dynamically loaded Ecto.Repo connects: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.connect_to_source_backend - * Dynamically loaded Ecto.Repo runs migrations required to work: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.create_log_event_table + * Dynamically created Ecto.Repo created for configured PSQL URL: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.create_repo + * Dynamically loaded Ecto.Repo connects: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.connect_to_repo + * Dynamically loaded Ecto.Repo runs migrations required to work: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.create_log_events_table ## On LogEvent ingestion: On a new event, the Postgres Pipeline will consume the event and store it into the dynamically loaded Logflare.Backends.Adaptor.PostgresAdaptor.Repo. @@ -43,9 +43,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do with source_id <- source_backend.source_id, {:ok, _} <- Registry.register(SourceDispatcher, source_id, {__MODULE__, :ingest}), {:ok, buffer_pid} <- MemoryBuffer.start_link([]), - repository_module <- __MODULE__.Repo.new_repository_for_source_backend(source_backend), - :ok <- __MODULE__.Repo.connect_to_source_backend(repository_module, source_backend), - :ok <- __MODULE__.Repo.create_log_event_table(repository_module, source_backend) do + repository_module <- __MODULE__.Repo.create_repo(source_backend), + :ok <- __MODULE__.Repo.connect_to_repo(source_backend), + :ok <- __MODULE__.Repo.create_log_events_table(source_backend) do state = %__MODULE__{ buffer_module: MemoryBuffer, buffer_pid: buffer_pid, @@ -84,12 +84,21 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do GenServer.call(pid, {:execute_query, query}) end + # expose PgRepo functions + defdelegate create_repo(source_backend), to: __MODULE__.Repo + defdelegate connect_to_repo(repo, source_backend, opts), to: __MODULE__.Repo + defdelegate table_name(source_or_source_backend), to: __MODULE__.Repo + defdelegate connect_to_repo(source_backend), to: __MODULE__.Repo + defdelegate connect_to_repo(source_backend, opts), to: __MODULE__.Repo + defdelegate create_log_events_table(source_backend), to: __MODULE__.Repo + defdelegate create_log_events_table(source_backend, override_migrations), to: __MODULE__.Repo + @doc """ Rolls back all migrations """ @spec rollback_migrations(SourceBackend.t()) :: :ok def rollback_migrations(source_backend) do - repository_module = __MODULE__.Repo.new_repository_for_source_backend(source_backend) + repository_module = __MODULE__.Repo.create_repo(source_backend) Ecto.Migrator.run( repository_module, @@ -106,7 +115,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do """ @spec drop_migrations_table(SourceBackend.t()) :: :ok def drop_migrations_table(source_backend) do - repository_module = __MODULE__.Repo.new_repository_for_source_backend(source_backend) + repository_module = __MODULE__.Repo.create_repo(source_backend) migrations_table = migrations_table_name(source_backend) Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migrations_table}") :ok diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex 
b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex index e3263ffed..114eb5a30 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex @@ -20,10 +20,10 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do adapter: Ecto.Adapters.Postgres end) - @spec new_repository_for_source_backend(SourceBackend.t()) :: atom() - def new_repository_for_source_backend(source_backend) do + @spec create_repo(SourceBackend.t()) :: atom() + def create_repo(source_backend) do source_backend = Repo.preload(source_backend, :source) - name = Module.concat([Logflare.Repo.Postgres, "Adaptor#{source_backend.source.token}"]) + name = get_repo_module(source_backend) case Code.ensure_compiled(name) do {:module, _} -> nil @@ -36,27 +36,45 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do name end - @spec connect_to_source_backend(Ecto.Repo.t(), SourceBackend.t(), Keyword.t()) :: :ok - def connect_to_source_backend(repository_module, %SourceBackend{config: config}, opts \\ []) do - unless Process.whereis(repository_module) do + @doc """ + Retrieves the repo module. Requires `:source` to be preloaded. + """ + @spec get_repo_module(SourceBackend.t()) :: Ecto.Repo.t() + def get_repo_module(%SourceBackend{source: %Source{token: token}} = source_backend) do + Module.concat([Logflare.Repo.Postgres, "Adaptor#{token}"]) + end + + @doc """ + Connects to a given postgres. Requires `:source` to be preloaded. + """ + @spec connect_to_repo(SourceBackend.t(), Keyword.t()) :: :ok + def connect_to_repo(%SourceBackend{config: config} = source_backend, opts \\ []) do + repo = get_repo_module(source_backend) + + unless Process.whereis(repo) do pool_size = Keyword.get(Application.get_env(:logflare, :postgres_backend_adapter), :pool_size, 10) + # use same pool type as Logflare.Repo + pool = Keyword.get(Application.get_env(:logflare, Logflare.Repo), :pool) + opts = [ {:url, config["url"] || config.url}, - {:name, repository_module}, + {:name, repo}, + {:pool, pool}, {:pool_size, pool_size} | opts ] - {:ok, _} = DynamicSupervisor.start_child(Supervisor, repository_module.child_spec(opts)) + {:ok, _} = DynamicSupervisor.start_child(Supervisor, repo.child_spec(opts)) end :ok end - @spec create_log_event_table(Ecto.Repo.t(), SourceBackend.t(), list() | nil) :: + @spec create_log_events_table(SourceBackend.t(), list() | nil) :: :ok | {:error, :failed_migration} - def create_log_event_table(repository_module, source_backend, override_migrations \\ nil) do + def create_log_events_table(source_backend, override_migrations \\ nil) do + repository_module = get_repo_module(source_backend) migrations = if override_migrations, do: override_migrations, else: migrations(source_backend) Ecto.Migrator.run(repository_module, migrations, :up, all: true) diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index 58d80d9a8..f9bd1d549 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -309,8 +309,8 @@ defmodule Logflare.Endpoints do do: {:error, "Postgres does not support multiple sources"} defp exec_sql_on_pg(%{source_backends: [source_backend]}, _, transformed_query, _, input_params) do - with repo <- PostgresAdaptorRepo.new_repository_for_source_backend(source_backend), - :ok <- PostgresAdaptorRepo.connect_to_source_backend(repo, source_backend), + with repo <- PostgresAdaptorRepo.create_repo(source_backend), + :ok <- PostgresAdaptorRepo.connect_to_repo(repo, source_backend), {:ok, result} <- SQL.query(repo, transformed_query, 
Map.to_list(input_params)), %{columns: columns, rows: rows} <- result do rows = Enum.map(rows, fn row -> columns |> Enum.zip(row) |> Map.new() end) diff --git a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs b/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs deleted file mode 100644 index 4a7825439..000000000 --- a/test/logflare/backends/adaptor/postgres_adaptor/repo_test.exs +++ /dev/null @@ -1,103 +0,0 @@ -defmodule Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest do - use Logflare.DataCase, async: false - - alias Logflare.Backends.Adaptor.PostgresAdaptor - alias Logflare.Backends.Adaptor.PostgresAdaptor.RepoTest.BadMigration - - import Ecto.Query - - setup do - %{username: username, password: password, database: database, hostname: hostname} = - Application.get_env(:logflare, Logflare.Repo) |> Map.new() - - url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" - - source_backend = - insert(:source_backend, - type: :postgres, - config: %{"url" => url}, - source: insert(:source, user: insert(:user)) - ) - - %{source_backend: source_backend} - end - - describe "new_repository_for_source_backend/1" do - test "creates a new Ecto.Repo for given source_backend", %{source_backend: source_backend} do - repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) - assert Keyword.get(repository_module.__info__(:attributes), :behaviour) == [Ecto.Repo] - end - - test "name of the module uses source_id", %{source_backend: source_backend} do - repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) - - assert repository_module == - Module.concat([Logflare.Repo.Postgres, "Adaptor#{source_backend.source.token}"]) - end - end - - describe "create_log_event_table/1" do - setup %{source_backend: source_backend} do - repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) - - :ok = - PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, - pool: Ecto.Adapters.SQL.Sandbox - ) - - Ecto.Adapters.SQL.Sandbox.mode(repository_module, :auto) - - on_exit(fn -> - Ecto.Migrator.run( - repository_module, - PostgresAdaptor.Repo.migrations(source_backend), - :down, - all: true - ) - - migration_table = Keyword.get(repository_module.config(), :migration_source) - Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") - true = repository_module |> Process.whereis() |> Process.exit(:normal) - end) - - %{repository_module: repository_module} - end - - test "runs migration for the newly created connection", %{ - source_backend: source_backend, - repository_module: repository_module - } do - assert PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) == :ok - - query = - from(l in PostgresAdaptor.Repo.table_name(source_backend), - select: PostgresAdaptor.LogEvent - ) - - assert repository_module.all(query) == [] - end - - test "handle migration errors", %{ - source_backend: source_backend, - repository_module: repository_module - } do - bad_migrations = [{0, BadMigration}] - - assert {:error, :failed_migration} = - PostgresAdaptor.Repo.create_log_event_table( - repository_module, - source_backend, - bad_migrations - ) - end - end - - defmodule BadMigration do - use Ecto.Migration - - def up do - alter table(:none) do - end - end - end -end diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index 6f8c06c3d..0d2a30390 100644 --- 
a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -8,49 +8,94 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do setup do repo = Application.get_env(:logflare, Logflare.Repo) - url = - "postgresql://#{repo[:username]}:#{repo[:password]}@#{repo[:hostname]}/#{repo[:database]}" - config = %{ - "url" => url + "url" => + "postgresql://#{repo[:username]}:#{repo[:password]}@#{repo[:hostname]}/#{repo[:database]}" } source = insert(:source, user: insert(:user)) source_backend = insert(:source_backend, type: :postgres, source: source, config: config) - pid = start_supervised!({PostgresAdaptor, source_backend}) + %{source_backend: source_backend} + end + + describe "with postgres repo" do + setup %{source_backend: source_backend} do + pid = start_supervised!({PostgresAdaptor, source_backend}) + + on_exit(fn -> + PostgresAdaptor.rollback_migrations(source_backend) + PostgresAdaptor.drop_migrations_table(source_backend) + end) + + %{pid: pid} + end + + test "ingest/2 and execute_query/2 dispatched message", %{ + pid: pid, + source_backend: source_backend + } do + log_event = + build(:log_event, + source: source_backend.source, + test: "data" + ) + + assert :ok = PostgresAdaptor.ingest(pid, [log_event]) + + # TODO: replace with a timeout retry func + :timer.sleep(1_500) - on_exit(fn -> - PostgresAdaptor.rollback_migrations(source_backend) - PostgresAdaptor.drop_migrations_table(source_backend) - end) + query = + from(l in PostgresAdaptor.Repo.table_name(source_backend), + select: l.body + ) - %{pid: pid, source_backend: source_backend} + assert [ + %{ + "test" => "data" + } + ] = PostgresAdaptor.execute_query(pid, query) + end end - test "ingest/2 and execute_query/2 dispatched message", %{ - pid: pid, - source_backend: source_backend - } do - log_event = - build(:log_event, - source: source_backend.source, - test: "data" - ) - - assert :ok = PostgresAdaptor.ingest(pid, [log_event]) - - # TODO: replace with a timeout retry func - :timer.sleep(1_500) - - query = - from(l in PostgresAdaptor.Repo.table_name(source_backend), - select: l.body - ) - - assert [ - %{ - "test" => "data" - } - ] = PostgresAdaptor.execute_query(pid, query) + describe "repo module" do + test "create_repo/1 creates a new Ecto.Repo for given source_backend", %{ + source_backend: source_backend + } do + repo = PostgresAdaptor.create_repo(source_backend) + assert Keyword.get(repo.__info__(:attributes), :behaviour) == [Ecto.Repo] + + # module name should use source token + assert Atom.to_string(repo) =~ Atom.to_string(source_backend.source.token) + end + + test "create_log_events_table/3 creates the table for a given source", %{ + source_backend: source_backend + } do + repo = PostgresAdaptor.create_repo(source_backend) + assert :ok = PostgresAdaptor.connect_to_repo(source_backend) + assert :ok = PostgresAdaptor.create_log_events_table(source_backend) + query = from(l in PostgresAdaptor.table_name(source_backend), select: l.body) + assert repo.all(query) == [] + end + + test "handle migration errors", %{source_backend: source_backend} do + repo = PostgresAdaptor.create_repo(source_backend) + assert :ok = PostgresAdaptor.connect_to_repo(source_backend) + bad_migrations = [{0, BadMigration}] + + assert {:error, :failed_migration} = + PostgresAdaptor.Repo.create_log_events_table(source_backend, bad_migrations) + end + end +end + +defmodule BadMigration do + @moduledoc false + use Ecto.Migration + + def up do + alter table(:none) do + end end end diff --git 
a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index 832a8d79b..3a33c5f64 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -215,14 +215,14 @@ defmodule Logflare.EndpointsTest do source: source ) - repository_module = PostgresAdaptor.Repo.new_repository_for_source_backend(source_backend) + repository_module = PostgresAdaptor.Repo.create_repo(source_backend) :ok = - PostgresAdaptor.Repo.connect_to_source_backend(repository_module, source_backend, + PostgresAdaptor.Repo.connect_to_repo(repository_module, source_backend, pool: Ecto.Adapters.SQL.Sandbox ) - :ok = PostgresAdaptor.Repo.create_log_event_table(repository_module, source_backend) + :ok = PostgresAdaptor.Repo.create_log_events_table(source_backend) on_exit(fn -> Ecto.Migrator.run( From aa34fb9478d7689b49a2ed7efbaa22236581e197 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 22:03:10 +0800 Subject: [PATCH 30/62] chore: added log capture assertion, fix compilation warning, formatting --- test/logflare/backends/postgres_adaptor_test.exs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index 0d2a30390..be1ff879a 100644 --- a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -5,6 +5,8 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do import Ecto.Query + import ExUnit.CaptureLog + setup do repo = Application.get_env(:logflare, Logflare.Repo) @@ -80,12 +82,17 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do end test "handle migration errors", %{source_backend: source_backend} do - repo = PostgresAdaptor.create_repo(source_backend) + PostgresAdaptor.create_repo(source_backend) assert :ok = PostgresAdaptor.connect_to_repo(source_backend) bad_migrations = [{0, BadMigration}] - assert {:error, :failed_migration} = - PostgresAdaptor.Repo.create_log_events_table(source_backend, bad_migrations) + assert capture_log(fn -> + assert {:error, :failed_migration} = + PostgresAdaptor.Repo.create_log_events_table( + source_backend, + bad_migrations + ) + end) =~ "[error]" end end end From 19898d133f0434c9df58e50ac42f4dd687949c23 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 02:35:13 +0800 Subject: [PATCH 31/62] feat: execute string on postgres --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 15 +++++++++++++++ test/logflare/backends/postgres_adaptor_test.exs | 13 +++++++++++++ 2 files changed, 28 insertions(+) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index e4f996038..618bb4dbc 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -142,4 +142,19 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do result = mod.all(query) {:reply, result, state} end + + @impl true + def handle_call({:execute_query, query_string}, _from, state) when is_binary(query_string) do + mod = state.repository_module + result = Ecto.Adapters.SQL.query!(mod, query_string) + + rows = + for row <- result.rows do + for {cell, index} <- Enum.with_index(row), into: %{} do + {Enum.at(result.columns, index), cell} + end + end + + {:reply, rows, state} + end end diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index be1ff879a..cf8a833d2 100644 --- 
a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -57,6 +57,19 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do "test" => "data" } ] = PostgresAdaptor.execute_query(pid, query) + + # query by string + assert [ + %{ + "body" => %{ + "test" => "data" + } + } + ] = + PostgresAdaptor.execute_query( + pid, + "select body from #{PostgresAdaptor.table_name(source_backend)}" + ) end end From 10942ba5513f029701332ebcaaf51783bc7d0e58 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 02:35:38 +0800 Subject: [PATCH 32/62] chore: rename PostgresAdaptor.Repo to PostgresAdaptor.PgRepo --- .../backends/adaptor/postgres_adaptor.ex | 60 ++++--------------- .../postgres_adaptor/{repo.ex => pg_repo.ex} | 46 ++++++++++++-- .../adaptor/postgres_adaptor/pipeline.ex | 4 +- .../repo/migrations/add_log_events.ex | 4 +- lib/logflare/endpoints.ex | 6 +- lib/logflare/sql_v2.ex | 4 +- .../backends/postgres_adaptor_test.exs | 5 +- 7 files changed, 66 insertions(+), 63 deletions(-) rename lib/logflare/backends/adaptor/postgres_adaptor/{repo.ex => pg_repo.ex} (74%) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 618bb4dbc..aa29db7ec 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -23,7 +23,8 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do alias Logflare.Backends.SourceBackend alias Logflare.Backends.SourceDispatcher alias Logflare.Buffers.MemoryBuffer - alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline + alias __MODULE__.Pipeline + alias __MODULE__.PgRepo typedstruct enforce: true do field(:buffer_module, Adaptor.t()) @@ -43,9 +44,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do with source_id <- source_backend.source_id, {:ok, _} <- Registry.register(SourceDispatcher, source_id, {__MODULE__, :ingest}), {:ok, buffer_pid} <- MemoryBuffer.start_link([]), - repository_module <- __MODULE__.Repo.create_repo(source_backend), - :ok <- __MODULE__.Repo.connect_to_repo(source_backend), - :ok <- __MODULE__.Repo.create_log_events_table(source_backend) do + repository_module <- create_repo(source_backend), + :ok <- connect_to_repo(source_backend), + :ok <- create_log_events_table(source_backend) do state = %__MODULE__{ buffer_module: MemoryBuffer, buffer_pid: buffer_pid, @@ -85,49 +86,14 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end # expose PgRepo functions - defdelegate create_repo(source_backend), to: __MODULE__.Repo - defdelegate connect_to_repo(repo, source_backend, opts), to: __MODULE__.Repo - defdelegate table_name(source_or_source_backend), to: __MODULE__.Repo - defdelegate connect_to_repo(source_backend), to: __MODULE__.Repo - defdelegate connect_to_repo(source_backend, opts), to: __MODULE__.Repo - defdelegate create_log_events_table(source_backend), to: __MODULE__.Repo - defdelegate create_log_events_table(source_backend, override_migrations), to: __MODULE__.Repo - - @doc """ - Rolls back all migrations - """ - @spec rollback_migrations(SourceBackend.t()) :: :ok - def rollback_migrations(source_backend) do - repository_module = __MODULE__.Repo.create_repo(source_backend) - - Ecto.Migrator.run( - repository_module, - __MODULE__.Repo.migrations(source_backend), - :down, - all: true - ) - - :ok - end - - @doc """ - Drops the migration table - """ - @spec drop_migrations_table(SourceBackend.t()) :: :ok - def drop_migrations_table(source_backend) do - 
repository_module = __MODULE__.Repo.create_repo(source_backend) - migrations_table = migrations_table_name(source_backend) - Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migrations_table}") - :ok - end - - @doc """ - Returns the migrations table name used for a given source - """ - @spec migrations_table_name(SourceBackend.t()) :: String.t() - def migrations_table_name(%SourceBackend{source_id: source_id}) do - "schema_migrations_#{source_id}" - end + defdelegate create_repo(source_backend), to: PgRepo + defdelegate connect_to_repo(source_backend), to: PgRepo + defdelegate table_name(source_or_source_backend), to: PgRepo + defdelegate create_log_events_table(source_backend), to: PgRepo + defdelegate create_log_events_table(source_backend, override_migrations), to: PgRepo + defdelegate rollback_migrations(source_backend), to: PgRepo + defdelegate drop_migrations_table(source_backend), to: PgRepo + defdelegate migrations_table_name(source_backend), to: PgRepo # GenServer @impl true diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex similarity index 74% rename from lib/logflare/backends/adaptor/postgres_adaptor/repo.ex rename to lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex index 114eb5a30..c011620cc 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex @@ -1,4 +1,4 @@ -defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do +defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do @moduledoc """ Creates a Ecto.Repo for a source backend configuration, runs migrations and connects to it. @@ -40,15 +40,15 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do Retrieves the repo module. Requires `:source` to be preloaded. """ @spec get_repo_module(SourceBackend.t()) :: Ecto.Repo.t() - def get_repo_module(%SourceBackend{source: %Source{token: token}} = source_backend) do + def get_repo_module(%SourceBackend{source: %Source{token: token}}) do Module.concat([Logflare.Repo.Postgres, "Adaptor#{token}"]) end @doc """ Connects to a given postgres. Requires `:source` to be preloaded. 
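  A hedged usage sketch (the struct fields and connection URL below are
  illustrative placeholders, not values from this changeset):

      source_backend = %SourceBackend{
        source: %Source{token: :my_source_token},
        config: %{"url" => "postgresql://user:pass@localhost:5432/logflare"}
      }

      :ok = PgRepo.connect_to_repo(source_backend)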
""" - @spec connect_to_repo(SourceBackend.t(), Keyword.t()) :: :ok - def connect_to_repo(%SourceBackend{config: config} = source_backend, opts \\ []) do + @spec connect_to_repo(SourceBackend.t()) :: :ok + def connect_to_repo(%SourceBackend{config: config} = source_backend) do repo = get_repo_module(source_backend) unless Process.whereis(repo) do @@ -62,7 +62,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do {:url, config["url"] || config.url}, {:name, repo}, {:pool, pool}, - {:pool_size, pool_size} | opts + {:pool_size, pool_size} ] {:ok, _} = DynamicSupervisor.start_child(Supervisor, repo.child_spec(opts)) @@ -100,4 +100,40 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo do @spec migrations(SourceBackend.t()) :: list({pos_integer(), atom()}) def migrations(source_backend), do: [{1, AddLogEvents.generate_migration(source_backend)}] + + @doc """ + Rolls back all migrations + """ + @spec rollback_migrations(SourceBackend.t()) :: :ok + def rollback_migrations(source_backend) do + repository_module = create_repo(source_backend) + + Ecto.Migrator.run( + repository_module, + migrations(source_backend), + :down, + all: true + ) + + :ok + end + + @doc """ + Drops the migration table + """ + @spec drop_migrations_table(SourceBackend.t()) :: :ok + def drop_migrations_table(source_backend) do + repository_module = create_repo(source_backend) + migrations_table = migrations_table_name(source_backend) + Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migrations_table}") + :ok + end + + @doc """ + Returns the migrations table name used for a given source + """ + @spec migrations_table_name(SourceBackend.t()) :: String.t() + def migrations_table_name(%SourceBackend{source_id: source_id}) do + "schema_migrations_#{source_id}" + end end diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex index 2d14f834b..db07f79b8 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex @@ -8,7 +8,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline do alias Broadway.Message alias Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo + alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Buffers.BufferProducer @spec start_link(PostgresAdaptor.t()) :: {:ok, pid()} @@ -56,7 +56,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline do changeset = %LogEvent{} - |> Ecto.put_meta(source: Repo.table_name(source_backend)) + |> Ecto.put_meta(source: PostgresAdaptor.table_name(source_backend)) |> LogEvent.changeset(params) repository_module.insert(changeset) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/repo/migrations/add_log_events.ex b/lib/logflare/backends/adaptor/postgres_adaptor/repo/migrations/add_log_events.ex index eef6af0e3..9d3dc4a69 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/repo/migrations/add_log_events.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/repo/migrations/add_log_events.ex @@ -4,10 +4,10 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Repo.Migrations.AddLogEvents """ use Ecto.Migration - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo + alias Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo def generate_migration(source_backend) do - table_name = Repo.table_name(source_backend) + table_name = PgRepo.table_name(source_backend) name = Module.concat([__MODULE__, 
"MigrationFor#{table_name}"]) ast = diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index f9bd1d549..b202d2e1e 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -1,7 +1,7 @@ defmodule Logflare.Endpoints do @moduledoc false alias Ecto.Adapters.SQL - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo, as: PostgresAdaptorRepo + alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Endpoints.Cache alias Logflare.Endpoints.Query alias Logflare.Endpoints.Resolver @@ -309,8 +309,8 @@ defmodule Logflare.Endpoints do do: {:error, "Postgres does not support multiple sources"} defp exec_sql_on_pg(%{source_backends: [source_backend]}, _, transformed_query, _, input_params) do - with repo <- PostgresAdaptorRepo.create_repo(source_backend), - :ok <- PostgresAdaptorRepo.connect_to_repo(repo, source_backend), + with repo <- PostgresAdaptor.create_repo(source_backend), + :ok <- PostgresAdaptor.connect_to_repo(source_backend), {:ok, result} <- SQL.query(repo, transformed_query, Map.to_list(input_params)), %{columns: columns, rows: rows} <- result do rows = Enum.map(rows, fn row -> columns |> Enum.zip(row) |> Map.new() end) diff --git a/lib/logflare/sql_v2.ex b/lib/logflare/sql_v2.ex index 9e10e8d41..2562af7bb 100644 --- a/lib/logflare/sql_v2.ex +++ b/lib/logflare/sql_v2.ex @@ -8,7 +8,7 @@ defmodule Logflare.SqlV2 do alias Logflare.User alias Logflare.SingleTenant alias Logflare.SqlV2.Parser - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo + alias Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo @doc """ Transforms and validates an SQL query for querying with bigquery.any() @@ -47,7 +47,7 @@ defmodule Logflare.SqlV2 do |> Enum.map(fn from -> {_, updated} = get_and_update_in(from, ["relation", "Table", "name"], fn [%{"value" => source}] = value -> - table_name = source_mapping |> Map.get(source) |> Repo.table_name() + table_name = source_mapping |> Map.get(source) |> PgRepo.table_name() {value, [%{"quote_style" => nil, "value" => table_name}]} end) diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index cf8a833d2..257d1e5b2 100644 --- a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -47,8 +47,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do # TODO: replace with a timeout retry func :timer.sleep(1_500) + # query by Ecto.Query query = - from(l in PostgresAdaptor.Repo.table_name(source_backend), + from(l in PostgresAdaptor.table_name(source_backend), select: l.body ) @@ -101,7 +102,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do assert capture_log(fn -> assert {:error, :failed_migration} = - PostgresAdaptor.Repo.create_log_events_table( + PostgresAdaptor.create_log_events_table( source_backend, bad_migrations ) From f3134c72e41038fd2aceaebb83c268ae66e44d26 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 02:43:58 +0800 Subject: [PATCH 33/62] chore: fix failing tests --- lib/logflare/endpoints.ex | 2 ++ test/logflare/endpoints_test.exs | 25 ++++++------------------- 2 files changed, 8 insertions(+), 19 deletions(-) diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index b202d2e1e..1621a5cf0 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -309,6 +309,8 @@ defmodule Logflare.Endpoints do do: {:error, "Postgres does not support multiple sources"} defp exec_sql_on_pg(%{source_backends: [source_backend]}, _, transformed_query, _, input_params) do + 
source_backend = Repo.preload(source_backend, :source) + with repo <- PostgresAdaptor.create_repo(source_backend), :ok <- PostgresAdaptor.connect_to_repo(source_backend), {:ok, result} <- SQL.query(repo, transformed_query, Map.to_list(input_params)), diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index 3a33c5f64..216a24b21 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -215,29 +215,16 @@ defmodule Logflare.EndpointsTest do source: source ) - repository_module = PostgresAdaptor.Repo.create_repo(source_backend) - - :ok = - PostgresAdaptor.Repo.connect_to_repo(repository_module, source_backend, - pool: Ecto.Adapters.SQL.Sandbox - ) - - :ok = PostgresAdaptor.Repo.create_log_events_table(source_backend) + PostgresAdaptor.create_repo(source_backend) + PostgresAdaptor.connect_to_repo(source_backend) + PostgresAdaptor.create_log_events_table(source_backend) on_exit(fn -> - Ecto.Migrator.run( - repository_module, - PostgresAdaptor.Repo.migrations(source_backend), - :down, - all: true - ) - - migration_table = Keyword.get(repository_module.config(), :migration_source) - Ecto.Adapters.SQL.query!(repository_module, "DROP TABLE IF EXISTS #{migration_table}") - true = repository_module |> Process.whereis() |> Process.exit(:normal) + PostgresAdaptor.rollback_migrations(source_backend) + PostgresAdaptor.drop_migrations_table(source_backend) end) - %{source: source, user: user} + %{source: source, user: user} end test "run an endpoint query without caching", %{source: source, user: user} do From 3885364211af6edec990a864d10ff8f21b6d44de Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 02:51:33 +0800 Subject: [PATCH 34/62] chore: consolidate SQL tests --- test/logflare/sql_test.exs | 18 ++++++++++++++++++ test/logflare/sql_v2_test.exs | 24 ------------------------ 2 files changed, 18 insertions(+), 24 deletions(-) delete mode 100644 test/logflare/sql_v2_test.exs diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs index 9af983323..c2c31cf5d 100644 --- a/test/logflare/sql_test.exs +++ b/test/logflare/sql_test.exs @@ -3,6 +3,7 @@ defmodule Logflare.SqlTest do use Logflare.DataCase alias Logflare.SingleTenant alias Logflare.SqlV2 + alias Logflare.Backends.Adaptor.PostgresAdaptor @logflare_project_id "logflare-project-id" @user_project_id "user-project-id" @user_dataset_id "user-dataset-id" @@ -356,4 +357,21 @@ defmodule Logflare.SqlTest do "`#{project_id}.#{dataset_id}.#{token}`" end + + describe "transform/3 for :postgres backends" do + setup do + user = insert(:user) + source = insert(:source, user: user, name: "source_#{TestUtils.random_string()}") + %{user: user, source: source} + end + + test "changes query on FROM command to correct table name", %{ + source: %{name: name} = source, + user: user + } do + input = "SELECT body, event_message, timestamp FROM #{name}" + expected = {:ok, "SELECT body, event_message, timestamp FROM #{PostgresAdaptor.table_name(source)}"} + assert SqlV2.transform(:postgres, input, user) == expected + end + end end diff --git a/test/logflare/sql_v2_test.exs b/test/logflare/sql_v2_test.exs deleted file mode 100644 index c3d3d74a5..000000000 --- a/test/logflare/sql_v2_test.exs +++ /dev/null @@ -1,24 +0,0 @@ -defmodule Logflare.SqlV2Test do - use Logflare.DataCase - - alias Logflare.TestUtils - alias Logflare.SqlV2 - alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo - - describe "transform/3 for :postgres backends" do - setup do - user = insert(:user) - source = insert(:source, 
user: user, name: "source_#{TestUtils.random_string()}") - %{user: user, source: source} - end - - test "changes query on FROM command to correct table name", %{ - source: %{name: name} = source, - user: user - } do - input = "SELECT body, event_message, timestamp FROM #{name}" - expected = {:ok, "SELECT body, event_message, timestamp FROM #{Repo.table_name(source)}"} - assert SqlV2.transform(:postgres, input, user) == expected - end - end -end From 2ee86bff1edb8ec909906a8836321a311eafcf39 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 02:56:28 +0800 Subject: [PATCH 35/62] chore: refactor raw string query exec --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index aa29db7ec..79d584b1d 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -114,12 +114,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do mod = state.repository_module result = Ecto.Adapters.SQL.query!(mod, query_string) - rows = - for row <- result.rows do - for {cell, index} <- Enum.with_index(row), into: %{} do - {Enum.at(result.columns, index), cell} - end - end + rows = for row <- result.rows do + result.columns |> Enum.zip(row) |> Map.new() + end {:reply, rows, state} end From ba0f0879c076eed66cf086d9153c4e24964d34eb Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 03:01:46 +0800 Subject: [PATCH 36/62] Add documentation --- .../adaptor/postgres_adaptor/pg_repo.ex | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex index c011620cc..b12169114 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex @@ -20,9 +20,12 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do adapter: Ecto.Adapters.Postgres end) + @doc """ + Dynamically compiles a new Ecto.Repo module for a given source. + Requires `:source` to be preloaded. + """ @spec create_repo(SourceBackend.t()) :: atom() - def create_repo(source_backend) do - source_backend = Repo.preload(source_backend, :source) + def create_repo(%SourceBackend{source: %_{}} = source_backend) do name = get_repo_module(source_backend) case Code.ensure_compiled(name) do @@ -71,6 +74,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do :ok end + @doc """ + Creates the Log Events table for the given source. + """ @spec create_log_events_table(SourceBackend.t(), list() | nil) :: :ok | {:error, :failed_migration} def create_log_events_table(source_backend, override_migrations \\ nil) do @@ -85,11 +91,13 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do {:error, :failed_migration} end + + @doc """ + Returns the table name for a given Source or SourceBackend. + If SourceBackend, :source must be preloaded. 
+ """ @spec table_name(SourceBackend.t() | Source.t()) :: binary() - def table_name(%SourceBackend{} = source_backend) do - %{source: source} = Repo.preload(source_backend, :source) - table_name(source) - end + def table_name(%SourceBackend{source: %_{} = source}), do: table_name(source) def table_name(%Source{token: token}) do token @@ -98,6 +106,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do |> then(&"log_events_#{&1}") end + @doc """ + Retunrns a list of migrations to run. + """ @spec migrations(SourceBackend.t()) :: list({pos_integer(), atom()}) def migrations(source_backend), do: [{1, AddLogEvents.generate_migration(source_backend)}] From 7a15e34ceda3e7ce5cf80fab50e84711443771a1 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 03:04:35 +0800 Subject: [PATCH 37/62] docs: update module doc --- .../backends/adaptor/postgres_adaptor.ex | 20 ++++++------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 79d584b1d..935f3907a 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -1,19 +1,11 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do @moduledoc """ - The PostgresAdaptor is a backend adaptor for the Postgres database. - - ## Configuration - We store the PSQL URL address to whom we will be connected to - ## How it works - ### On Source Backend creation: - * Broadway pipeline for ingestion: Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline - * MemoryBuffer for buffering log events: Logflare.Buffers.MemoryBuffer - * Dynamically created Ecto.Repo created for configured PSQL URL: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.create_repo - * Dynamically loaded Ecto.Repo connects: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.connect_to_repo - * Dynamically loaded Ecto.Repo runs migrations required to work: Logflare.Backends.Adaptor.PostgresAdaptor.Repo.create_log_events_table - - ## On LogEvent ingestion: - On a new event, the Postgres Pipeline will consume the event and store it into the dynamically loaded Logflare.Backends.Adaptor.PostgresAdaptor.Repo. + The backend adaptor for the Postgres database. + + Config: + `:url` - the database connection string + + On ingest, pipeline will insert it into the log event table for the given source. 
""" use GenServer use TypedStruct From 1571d5eb0ebb5d205627ceb098cd5b91e29941ab Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 03:13:39 +0800 Subject: [PATCH 38/62] chore: move insert logic to PgRepo --- .../backends/adaptor/postgres_adaptor.ex | 1 + .../{log_event.ex => pg_log_event.ex} | 7 ++--- .../adaptor/postgres_adaptor/pg_repo.ex | 29 +++++++++++++++++++ .../adaptor/postgres_adaptor/pipeline.ex | 24 ++------------- 4 files changed, 35 insertions(+), 26 deletions(-) rename lib/logflare/backends/adaptor/postgres_adaptor/{log_event.ex => pg_log_event.ex} (78%) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 935f3907a..0d1f61dc8 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -86,6 +86,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do defdelegate rollback_migrations(source_backend), to: PgRepo defdelegate drop_migrations_table(source_backend), to: PgRepo defdelegate migrations_table_name(source_backend), to: PgRepo + defdelegate insert_log_event(source_backend, log_event), to: PgRepo # GenServer @impl true diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/log_event.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_log_event.ex similarity index 78% rename from lib/logflare/backends/adaptor/postgres_adaptor/log_event.ex rename to lib/logflare/backends/adaptor/postgres_adaptor/pg_log_event.ex index 1565342bf..efb39769b 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/log_event.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_log_event.ex @@ -1,13 +1,12 @@ -defmodule Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent do +defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgLogEvent do @moduledoc """ Logflare Log Event schema to be used by the Postgres Adaptor """ - use Ecto.Schema + use TypedEctoSchema import Ecto.Changeset @primary_key {:id, :string, []} - - schema "log_event" do + typed_schema "log_event" do field(:body, :map) field(:event_message, :string) field(:timestamp, :utc_datetime_usec) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex index b12169114..ca32dadcb 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex @@ -8,9 +8,11 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do alias Logflare.Backends.Adaptor.PostgresAdaptor.Repo.Migrations.AddLogEvents alias Logflare.Backends.Adaptor.PostgresAdaptor.Supervisor alias Logflare.Backends.Adaptor.PostgresAdaptor + alias Logflare.Backends.Adaptor.PostgresAdaptor.PgLogEvent alias Logflare.Backends.SourceBackend alias Logflare.Repo alias Logflare.Source + alias Logflare.LogEvent require Logger @@ -147,4 +149,31 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do def migrations_table_name(%SourceBackend{source_id: source_id}) do "schema_migrations_#{source_id}" end + + @doc """ + Inserts a LogEvent into the given source backend table + """ + @spec insert_log_event(SourceBackend.t(), LogEvent.t()) :: {:ok, PgLogEvent.t()} + def insert_log_event(source_backend, %LogEvent{} = log_event) do + repo = get_repo_module(source_backend) + table = PostgresAdaptor.table_name(source_backend) + + timestamp = + log_event.body["timestamp"] + |> DateTime.from_unix!(:microsecond) + |> DateTime.to_naive() + + params = %{ + id: log_event.body["id"], + event_message: 
log_event.body["event_message"], + timestamp: timestamp, + body: log_event.body + } + + changeset = + %PgLogEvent{} + |> Ecto.put_meta(source: table) + |> PgLogEvent.changeset(params) + repo.insert(changeset) + end end diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex index db07f79b8..cd7c73f55 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pipeline.ex @@ -7,7 +7,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline do use Broadway alias Broadway.Message - alias Logflare.Backends.Adaptor.PostgresAdaptor.LogEvent alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Buffers.BufferProducer @@ -39,27 +38,8 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline do Message.update_data(message, &process_data(&1, adaptor_state)) end - defp process_data(log_event, adaptor_state) do - %{repository_module: repository_module, source_backend: source_backend} = adaptor_state - - timestamp = - log_event.body["timestamp"] - |> DateTime.from_unix!(:microsecond) - |> DateTime.to_naive() - - params = %{ - id: log_event.body["id"], - event_message: log_event.body["event_message"], - timestamp: timestamp, - body: log_event.body - } - - changeset = - %LogEvent{} - |> Ecto.put_meta(source: PostgresAdaptor.table_name(source_backend)) - |> LogEvent.changeset(params) - - repository_module.insert(changeset) + defp process_data(log_event, %{source_backend: source_backend}) do + PostgresAdaptor.insert_log_event(source_backend, log_event) end def transform(event, _opts) do From 9977600018c30b29565a50a4c8144446ffad452f Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 03:15:12 +0800 Subject: [PATCH 39/62] chore: formatting --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 7 ++++--- lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex | 2 +- test/logflare/endpoints_test.exs | 2 +- test/logflare/sql_test.exs | 5 ++++- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 0d1f61dc8..ce887799a 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -107,9 +107,10 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do mod = state.repository_module result = Ecto.Adapters.SQL.query!(mod, query_string) - rows = for row <- result.rows do - result.columns |> Enum.zip(row) |> Map.new() - end + rows = + for row <- result.rows do + result.columns |> Enum.zip(row) |> Map.new() + end {:reply, rows, state} end diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex index ca32dadcb..dfa696d32 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex @@ -93,7 +93,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do {:error, :failed_migration} end - @doc """ Returns the table name for a given Source or SourceBackend. If SourceBackend, :source must be preloaded. 
@@ -174,6 +173,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do %PgLogEvent{} |> Ecto.put_meta(source: table) |> PgLogEvent.changeset(params) + repo.insert(changeset) end end diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index 216a24b21..c5adb6189 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -224,7 +224,7 @@ defmodule Logflare.EndpointsTest do PostgresAdaptor.drop_migrations_table(source_backend) end) - %{source: source, user: user} + %{source: source, user: user} end test "run an endpoint query without caching", %{source: source, user: user} do diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs index c2c31cf5d..1709047f9 100644 --- a/test/logflare/sql_test.exs +++ b/test/logflare/sql_test.exs @@ -370,7 +370,10 @@ defmodule Logflare.SqlTest do user: user } do input = "SELECT body, event_message, timestamp FROM #{name}" - expected = {:ok, "SELECT body, event_message, timestamp FROM #{PostgresAdaptor.table_name(source)}"} + + expected = + {:ok, "SELECT body, event_message, timestamp FROM #{PostgresAdaptor.table_name(source)}"} + assert SqlV2.transform(:postgres, input, user) == expected end end From 015ac14ff48c116aa5c810436e3b28373f71d5d6 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 14:28:33 +0800 Subject: [PATCH 40/62] feat: adjust PostgresAdaptor.execute_query/2 to be stateless --- .../backends/adaptor/postgres_adaptor.ex | 41 +++++++++---------- .../backends/postgres_adaptor_test.exs | 4 +- 2 files changed, 21 insertions(+), 24 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index ce887799a..e5aab442c 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -73,8 +73,25 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do def queryable?(), do: true @impl true - def execute_query(pid, query) do - GenServer.call(pid, {:execute_query, query}) + def execute_query(%SourceBackend{} = source_backend, %Ecto.Query{} = query) do + mod = create_repo(source_backend) + :ok = connect_to_repo(source_backend) + result = mod.all(query) + {:ok, result} + end + + def execute_query(%SourceBackend{} = source_backend, query_string) + when is_binary(query_string) do + mod = create_repo(source_backend) + :ok = connect_to_repo(source_backend) + result = Ecto.Adapters.SQL.query!(mod, query_string) + + rows = + for row <- result.rows do + result.columns |> Enum.zip(row) |> Map.new() + end + + {:ok, rows} end # expose PgRepo functions @@ -94,24 +111,4 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do MemoryBuffer.add_many(state.buffer_pid, log_events) {:reply, :ok, state} end - - @impl true - def handle_call({:execute_query, %Ecto.Query{} = query}, _from, state) do - mod = state.repository_module - result = mod.all(query) - {:reply, result, state} - end - - @impl true - def handle_call({:execute_query, query_string}, _from, state) when is_binary(query_string) do - mod = state.repository_module - result = Ecto.Adapters.SQL.query!(mod, query_string) - - rows = - for row <- result.rows do - result.columns |> Enum.zip(row) |> Map.new() - end - - {:reply, rows, state} - end end diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index 257d1e5b2..d459cf235 100644 --- a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ 
-57,7 +57,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do %{ "test" => "data" } - ] = PostgresAdaptor.execute_query(pid, query) + ] = PostgresAdaptor.execute_query(source_backend, query) # query by string assert [ @@ -68,7 +68,7 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do } ] = PostgresAdaptor.execute_query( - pid, + source_backend, "select body from #{PostgresAdaptor.table_name(source_backend)}" ) end From a70dc9883066970bef67aea18d82157324ed188f Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 16:54:29 +0800 Subject: [PATCH 41/62] chore: fix failing PostgresAdaptor tests --- .../backends/postgres_adaptor_test.exs | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs index d459cf235..77399a940 100644 --- a/test/logflare/backends/postgres_adaptor_test.exs +++ b/test/logflare/backends/postgres_adaptor_test.exs @@ -53,20 +53,22 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do select: l.body ) - assert [ - %{ - "test" => "data" - } - ] = PostgresAdaptor.execute_query(source_backend, query) + assert {:ok, + [ + %{ + "test" => "data" + } + ]} = PostgresAdaptor.execute_query(source_backend, query) # query by string - assert [ - %{ - "body" => %{ - "test" => "data" - } - } - ] = + assert {:ok, + [ + %{ + "body" => %{ + "test" => "data" + } + } + ]} = PostgresAdaptor.execute_query( source_backend, "select body from #{PostgresAdaptor.table_name(source_backend)}" From 0fce7e0dcb53c658a4a4cd5f200133debef8b511 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 19:10:21 +0800 Subject: [PATCH 42/62] chore: fix compilation warnings --- lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex index dfa696d32..9f73b1dea 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor/pg_repo.ex @@ -10,7 +10,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo do alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.Adaptor.PostgresAdaptor.PgLogEvent alias Logflare.Backends.SourceBackend - alias Logflare.Repo alias Logflare.Source alias Logflare.LogEvent From 915419b7026c7c6d223e793e028acc89273bb9c0 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Mon, 17 Jul 2023 16:53:11 +0800 Subject: [PATCH 43/62] chore: updated module reference and import --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index e5aab442c..3fb01b9a0 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -11,12 +11,13 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do use TypedStruct use Logflare.Backends.Adaptor + alias Logflare.Backends.Adaptor.PostgresAdaptor + alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline + alias Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo alias Logflare.Backends alias Logflare.Backends.SourceBackend alias Logflare.Backends.SourceDispatcher alias Logflare.Buffers.MemoryBuffer - alias __MODULE__.Pipeline - alias __MODULE__.PgRepo typedstruct enforce: true do field(:buffer_module, Adaptor.t()) From 
4a1693260c7e9920084d45c43fd2dc4afabc04e0 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Mon, 17 Jul 2023 17:05:42 +0800 Subject: [PATCH 44/62] chore: fix compilation warning --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 3fb01b9a0..343e52dbb 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -11,7 +11,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do use TypedStruct use Logflare.Backends.Adaptor - alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Backends.Adaptor.PostgresAdaptor.Pipeline alias Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo alias Logflare.Backends From 11f463eac8639a661ed873aa277be969c6a79b81 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Thu, 13 Jul 2023 04:52:51 +0800 Subject: [PATCH 45/62] chore: failing tests --- test/logflare/endpoints_test.exs | 50 +++++--------------------------- 1 file changed, 7 insertions(+), 43 deletions(-) diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index c5adb6189..a9d5b63d2 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -198,13 +198,14 @@ defmodule Logflare.EndpointsTest do describe "running queries in postgres backends" do setup do - stub(Goth, :fetch, fn _mod -> {:ok, %Goth.Token{token: "auth-token"}} end) + insert(:plan) - %{username: username, password: password, database: database, hostname: hostname} = - Application.get_env(:logflare, Logflare.Repo) |> Map.new() + repo = Application.get_env(:logflare, Logflare.Repo) - url = "postgresql://#{username}:#{password}@#{hostname}/#{database}" + url = + "postgresql://#{repo[:username]}:#{repo[:password]}@#{repo[:hostname]}/#{repo[:database]}" + config = %{"url" => url} user = insert(:user) source = insert(:source, user: user, name: "c") @@ -229,8 +230,7 @@ defmodule Logflare.EndpointsTest do test "run an endpoint query without caching", %{source: source, user: user} do query = "select body from #{source.name}" - source_mapping = %{source.name => source.token} - endpoint = insert(:endpoint, user: user, query: query, source_mapping: source_mapping) + endpoint = insert(:endpoint, user: user, query: query) assert {:ok, %{rows: []}} = Endpoints.run_query(endpoint) end @@ -241,44 +241,8 @@ defmodule Logflare.EndpointsTest do test "run_cached_query/1", %{source: source, user: user} do query = "select body from #{source.name}" - source_mapping = %{source.name => source.token} - endpoint = insert(:endpoint, user: user, query: query, source_mapping: source_mapping) + endpoint = insert(:endpoint, user: user, query: query) assert {:ok, %{rows: []}} = Endpoints.run_cached_query(endpoint) end - - for field_changed <- [ - :query, - :sandboxable, - :cache_duration_seconds, - :proactive_requerying_seconds, - :max_limit - ] do - test "update_query/2 will kill all existing caches on field change (#{field_changed})", %{ - source: source, - user: user - } do - query = "select body from #{source.name}" - source_mapping = %{source.name => source.token} - endpoint = insert(:endpoint, user: user, query: query, source_mapping: source_mapping) - cache_pid = start_supervised!({Logflare.Endpoints.Cache, {endpoint, %{}}}) - - assert {:ok, %{rows: []}} = Endpoints.run_cached_query(endpoint) - - params = - case unquote(field_changed) do - :query -> %{query: "select timestamp from #{source.name}"} 
- :sandboxable -> %{sandboxable: true} - # integer keys - key -> Map.new([{key, 123}]) - end - - assert {:ok, updated} = Endpoints.update_query(endpoint, params) - # should kill the cache process - :timer.sleep(500) - refute Process.alive?(cache_pid) - # 2nd query should not hit cache - assert {:ok, %{rows: []}} = Endpoints.run_cached_query(updated) - end - end end end From 7aff5584b89d61e385933e08b1b6af0f64590a0a Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 11:57:32 +0800 Subject: [PATCH 46/62] chore: rename repo binding for clarity --- test/logflare/endpoints_test.exs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index a9d5b63d2..feefc13e2 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -200,10 +200,10 @@ defmodule Logflare.EndpointsTest do setup do insert(:plan) - repo = Application.get_env(:logflare, Logflare.Repo) + cfg = Application.get_env(:logflare, Logflare.Repo) url = - "postgresql://#{repo[:username]}:#{repo[:password]}@#{repo[:hostname]}/#{repo[:database]}" + "postgresql://#{cfg[:username]}:#{cfg[:password]}@#{cfg[:hostname]}/#{cfg[:database]}" config = %{"url" => url} user = insert(:user) From 876f1a835d872918549524ea576bd4e62c7ff3fa Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 13:18:22 +0800 Subject: [PATCH 47/62] feat: add list_source_backends_by_user_id --- lib/logflare/backends.ex | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/lib/logflare/backends.ex b/lib/logflare/backends.ex index 3cfa48fe2..2a4d85c45 100644 --- a/lib/logflare/backends.ex +++ b/lib/logflare/backends.ex @@ -29,6 +29,16 @@ defmodule Logflare.Backends do |> Enum.map(fn sb -> typecast_config_string_map_to_atom_map(sb) end) end + @doc """ + Lists `SourceBackend`s by user + """ + @spec list_source_backends_by_user_id(integer()) :: [SourceBackend.t()] + def list_source_backends_by_user_id(id) when is_integer(id) do + from(sb in SourceBackend, join: s in Source, where: s.user_id == ^id) + |> Repo.all() + |> Enum.map(fn sb -> typecast_config_string_map_to_atom_map(sb) end) + end + @doc """ Creates a SourceBackend for a given source. 
""" From 1b66983fbb6f7a8d4f924e4310153ccdb1fabf19 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 13:18:51 +0800 Subject: [PATCH 48/62] fix: add adaptor genserver name --- lib/logflare/backends/adaptor/postgres_adaptor.ex | 4 +++- lib/logflare/backends/adaptor/webhook_adaptor.ex | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex index 343e52dbb..768438cbc 100644 --- a/lib/logflare/backends/adaptor/postgres_adaptor.ex +++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex @@ -28,7 +28,9 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do end def start_link(%SourceBackend{} = source_backend) do - GenServer.start_link(__MODULE__, source_backend) + GenServer.start_link(__MODULE__, source_backend, + name: Backends.via_source_backend(source_backend, __MODULE__) + ) end @impl true diff --git a/lib/logflare/backends/adaptor/webhook_adaptor.ex b/lib/logflare/backends/adaptor/webhook_adaptor.ex index df546bb2e..1a9f4b041 100644 --- a/lib/logflare/backends/adaptor/webhook_adaptor.ex +++ b/lib/logflare/backends/adaptor/webhook_adaptor.ex @@ -20,7 +20,9 @@ defmodule Logflare.Backends.Adaptor.WebhookAdaptor do end def start_link(%SourceBackend{} = source_backend) do - GenServer.start_link(__MODULE__, source_backend) + GenServer.start_link(__MODULE__, source_backend, + name: Backends.via_source_backend(source_backend, __MODULE__) + ) end @impl true From d78a29271ccfbde45dbe8b55e2d296ba0fe266fe Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 13:31:13 +0800 Subject: [PATCH 49/62] feat: add ensure_source_sup_started/1 --- lib/logflare/backends.ex | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/lib/logflare/backends.ex b/lib/logflare/backends.ex index 2a4d85c45..0074f0e02 100644 --- a/lib/logflare/backends.ex +++ b/lib/logflare/backends.ex @@ -199,6 +199,18 @@ defmodule Logflare.Backends do end end + @doc """ + Ensures that a the SourceSup is started. Only returns error tuple if not alreadt started + """ + @spec ensure_source_sup_started(Source.t()) :: :ok | {:error, term()} + def ensure_source_sup_started(%Source{} = source) do + case start_source_sup(source) do + {:ok, _pid} -> :ok + {:error, :already_started} -> :ok + {:error, _} = err -> err + end + end + @doc """ Stops a given SourceSup for a source. if not started, it will return an error tuple. 
""" From e2a279a3ad6fd0d5f67fa4ea94cbb2c2f4856758 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 14:33:57 +0800 Subject: [PATCH 50/62] feat: add :language to to endpoint_queries --- lib/logflare/endpoints.ex | 90 +++++++++---------- lib/logflare/endpoints/query.ex | 48 +++------- lib/logflare/sql_v2.ex | 5 +- lib/logflare_web/live/endpoints_live.ex | 2 +- ..._add_language_column_to_endpoint_query.exs | 10 +++ test/logflare/endpoints_test.exs | 22 +++-- test/support/factory.ex | 1 + 7 files changed, 86 insertions(+), 92 deletions(-) create mode 100644 priv/repo/migrations/20230714041101_add_language_column_to_endpoint_query.exs diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index 1621a5cf0..357b9c826 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -1,7 +1,6 @@ defmodule Logflare.Endpoints do @moduledoc false alias Ecto.Adapters.SQL - alias Logflare.Backends.Adaptor.PostgresAdaptor alias Logflare.Endpoints.Cache alias Logflare.Endpoints.Query alias Logflare.Endpoints.Resolver @@ -9,6 +8,9 @@ defmodule Logflare.Endpoints do alias Logflare.User alias Logflare.Users alias Logflare.Utils + alias Logflare.Backends + alias Logflare.Backends.SourceBackend + alias Logflare.Backends.Adaptor.PostgresAdaptor import Ecto.Query @typep run_query_return :: {:ok, %{rows: [map()]}} | {:error, String.t()} @@ -84,7 +86,7 @@ defmodule Logflare.Endpoints do user |> Ecto.build_assoc(:endpoint_queries, sandbox_query: sandbox) |> Repo.preload(:user) - |> Query.sandboxed_endpoint_changeset(attrs) + |> Query.sandboxed_endpoint_changeset(attrs, sandbox) |> Repo.insert() false -> @@ -153,19 +155,11 @@ defmodule Logflare.Endpoints do transform_input = if(sandboxable && sql_param, do: {query_string, sql_param}, else: query_string) - {backend, sources} = Query.choose_backend_and_extract_sources(endpoint_query) - with {:ok, declared_params} <- Logflare.SqlV2.parameters(query_string), - {:ok, transformed_query} <- Logflare.SqlV2.transform(backend, transform_input, user_id), + {:ok, transformed_query} <- + Logflare.SqlV2.transform(endpoint_query.language, transform_input, user_id), {:ok, result} <- - run_on_backend( - backend, - sources, - endpoint_query, - transformed_query, - declared_params, - params - ) do + exec_query_on_backend(endpoint_query, transformed_query, declared_params, params) do {:ok, result} end end @@ -174,12 +168,14 @@ defmodule Logflare.Endpoints do Runs a query string ### Example - iex> run_query_string(%User{...}, "select current_time() where @value > 4", params: %{"value" => "123"}) + iex> run_query_string(%User{...}, {:bq_sql, "select current_time() where @value > 4"}, params: %{"value" => "123"}) {:ok, %{rows: [...]} } """ @typep run_query_string_opts :: [sandboxable: boolean(), params: map()] - @spec run_query_string(User.t(), String.t(), run_query_string_opts()) :: run_query_return() - def run_query_string(user, query_string, opts \\ %{}) do + @typep language :: :bq_sql | :pg_sql | :lql + @spec run_query_string(User.t(), {language(), String.t()}, run_query_string_opts()) :: + run_query_return() + def run_query_string(user, {language, query_string}, opts \\ %{}) do opts = Enum.into(opts, %{sandboxable: false, params: %{}}) source_mapping = @@ -190,6 +186,7 @@ defmodule Logflare.Endpoints do query = %Query{ query: query_string, + language: language, sandboxable: opts.sandboxable, user: user, user_id: user.id, @@ -209,7 +206,38 @@ defmodule Logflare.Endpoints do |> Cache.query() end - defp exec_sql_on_bq(%Query{} = endpoint_query, 
transformed_query, declared_params, input_params) + defp exec_query_on_backend( + %Query{language: :pg_sql} = endpoint_query, + transformed_query, + declared_params, + params + ) do + # find compatible source backend + # TODO: move this to Backends module + source_backend = + Backends.list_source_backends_by_user_id(endpoint_query.user_id) + |> Repo.preload([:source]) + |> Enum.filter(fn sb -> sb.type == :postgres end) + |> List.first() + |> then(fn + nil -> + raise "No matching source backend found for Postgres query execution" + + other -> + other + end) + + with {:ok, rows} <- PostgresAdaptor.execute_query(source_backend, transformed_query) do + {:ok, %{rows: rows}} + end + end + + defp exec_query_on_backend( + %Query{language: _} = endpoint_query, + transformed_query, + declared_params, + input_params + ) when is_binary(transformed_query) and is_list(declared_params) and is_map(input_params) do @@ -291,32 +319,4 @@ defmodule Logflare.Endpoints do message end end - - defp run_on_backend(:bigquery, _, endpoint_query, transformed_query, declared_params, params), - do: exec_sql_on_bq(endpoint_query, transformed_query, declared_params, params) - - defp run_on_backend( - :postgres, - [source], - endpoint_query, - transformed_query, - declared_params, - params - ), - do: exec_sql_on_pg(source, endpoint_query, transformed_query, declared_params, params) - - defp run_on_backend(:postgres, _, _, _, _, _), - do: {:error, "Postgres does not support multiple sources"} - - defp exec_sql_on_pg(%{source_backends: [source_backend]}, _, transformed_query, _, input_params) do - source_backend = Repo.preload(source_backend, :source) - - with repo <- PostgresAdaptor.create_repo(source_backend), - :ok <- PostgresAdaptor.connect_to_repo(source_backend), - {:ok, result} <- SQL.query(repo, transformed_query, Map.to_list(input_params)), - %{columns: columns, rows: rows} <- result do - rows = Enum.map(rows, fn row -> columns |> Enum.zip(row) |> Map.new() end) - {:ok, %{rows: rows}} - end - end end diff --git a/lib/logflare/endpoints/query.ex b/lib/logflare/endpoints/query.ex index 8af0c2c3e..ea2e03320 100644 --- a/lib/logflare/endpoints/query.ex +++ b/lib/logflare/endpoints/query.ex @@ -24,6 +24,7 @@ defmodule Logflare.Endpoints.Query do field(:token, Ecto.UUID, autogenerate: true) field(:name, :string) field(:query, :string) + field(:language, Ecto.Enum, values: [:bq_sql, :pg_sql, :lql]) field(:source_mapping, :map) field(:sandboxable, :boolean) field(:cache_duration_seconds, :integer, default: 3_600) @@ -49,9 +50,10 @@ defmodule Logflare.Endpoints.Query do :cache_duration_seconds, :proactive_requerying_seconds, :max_limit, - :enable_auth + :enable_auth, + :language ]) - |> validate_required([:name, :query]) + |> validate_required([:name, :query, :language]) end def update_by_user_changeset(query, attrs) do @@ -64,14 +66,15 @@ defmodule Logflare.Endpoints.Query do :cache_duration_seconds, :proactive_requerying_seconds, :max_limit, - :enable_auth + :enable_auth, + :language ]) |> validate_query(:query) |> default_validations() |> update_source_mapping() end - def sandboxed_endpoint_changeset(query, attrs) do + def sandboxed_endpoint_changeset(query, attrs, sandbox) do query |> cast(attrs, [ :name, @@ -80,59 +83,34 @@ defmodule Logflare.Endpoints.Query do :cache_duration_seconds, :proactive_requerying_seconds, :max_limit, - :enable_auth + :enable_auth, + :language ]) |> put_change(:sandboxable, false) + |> Ecto.Changeset.put_change(:language, sandbox.language) |> validate_required([:sandbox_query]) |> 
default_validations() end def default_validations(changeset) do changeset - |> validate_required([:name, :query, :user]) + |> validate_required([:name, :query, :user, :language]) |> unique_constraint(:name, name: :endpoint_queries_name_index) |> unique_constraint(:token) |> validate_number(:max_limit, greater_than: 0, less_than: 10_001) end def validate_query(changeset, field) when is_atom(field) do - {backend, _} = choose_backend_and_extract_sources(changeset.data) + language = Ecto.Changeset.get_field(changeset, :language, :bq_sql) validate_change(changeset, field, fn field, value -> - case Logflare.SqlV2.transform(backend, value, get_field(changeset, :user)) do + case Logflare.SqlV2.transform(language, value, get_field(changeset, :user)) do {:ok, _} -> [] {:error, error} -> [{field, error}] end end) end - @doc """ - Defines which backend should be used to run a given query - """ - @spec choose_backend_and_extract_sources(Query.t()) :: {:bigquery | :postgres, [Source.t()]} - def choose_backend_and_extract_sources(%Query{source_mapping: nil}), do: {:bigquery, nil} - - def choose_backend_and_extract_sources(%Query{source_mapping: source_mapping}) do - sources = - Enum.map(source_mapping, fn {_, token} -> - Sources.get_by_and_preload([token: token], [:source_backends]) - end) - - backend = - sources - |> Enum.reduce([], fn - %{source_backends: []}, acc -> [:bigquery] ++ acc - %{source_backends: source_backends}, acc -> Enum.map(source_backends, & &1.type) ++ acc - end) - |> Enum.uniq() - |> then(fn - [backend_type] -> backend_type - _ -> :bigquery - end) - - {backend, sources} - end - # Only update source mapping if there are no errors defp update_source_mapping(%{errors: [], changes: %{query: query}} = changeset) when is_binary(query) do diff --git a/lib/logflare/sql_v2.ex b/lib/logflare/sql_v2.ex index 2562af7bb..9962fd30d 100644 --- a/lib/logflare/sql_v2.ex +++ b/lib/logflare/sql_v2.ex @@ -35,7 +35,7 @@ defmodule Logflare.SqlV2 do transform(backend, input, user) end - def transform(:postgres, input, user) do + def transform(:pg_sql, input, user) do {:ok, [input]} = Parser.parse(input) sources = Sources.list_sources_by_user(user) @@ -58,7 +58,8 @@ defmodule Logflare.SqlV2 do Parser.to_string(input) end - def transform(:bigquery, input, %User{} = user) do + # default to bq_sql + def transform(lang, input, %User{} = user) when lang in [:bq_sql, nil] do %_{bigquery_project_id: user_project_id, bigquery_dataset_id: user_dataset_id} = user {query, sandboxed_query} = diff --git a/lib/logflare_web/live/endpoints_live.ex b/lib/logflare_web/live/endpoints_live.ex index 1f221b005..53f919166 100644 --- a/lib/logflare_web/live/endpoints_live.ex +++ b/lib/logflare_web/live/endpoints_live.ex @@ -261,7 +261,7 @@ defmodule LogflareWeb.EndpointsLive do %{"query_params" => query_params, "query_string" => query_string}, %{assigns: %{user: user}} = socket ) do - result = Endpoints.run_query_string(user, query_string, query_params) + result = Endpoints.run_query_string(user, {:bq_sql, query_string}, query_params) socket = handle_query_result(socket, result) {:noreply, socket} end diff --git a/priv/repo/migrations/20230714041101_add_language_column_to_endpoint_query.exs b/priv/repo/migrations/20230714041101_add_language_column_to_endpoint_query.exs new file mode 100644 index 000000000..c0d4ace4e --- /dev/null +++ b/priv/repo/migrations/20230714041101_add_language_column_to_endpoint_query.exs @@ -0,0 +1,10 @@ +defmodule Logflare.Repo.Migrations.AddLanguageColumnToEndpointQuery do + use Ecto.Migration + + def 
change do + + alter table(:endpoint_queries) do + add(:language, :string) + end + end +end diff --git a/test/logflare/endpoints_test.exs b/test/logflare/endpoints_test.exs index feefc13e2..48d82dc37 100644 --- a/test/logflare/endpoints_test.exs +++ b/test/logflare/endpoints_test.exs @@ -77,6 +77,8 @@ defmodule Logflare.EndpointsTest do assert %{name: "abc", query: "select r from u", sandboxable: false} = sandboxed + assert sandboxed.language == sandbox_query.language + # non-cte invalid_sandbox = insert(:endpoint, user: user) @@ -94,7 +96,8 @@ defmodule Logflare.EndpointsTest do assert {:ok, %_{query: stored_sql, source_mapping: mapping}} = Endpoints.create_query(user, %{ name: "fully-qualified", - query: "select @test from #{source.name}" + query: "select @test from #{source.name}", + language: :bq_sql }) assert stored_sql =~ "mysource" @@ -107,7 +110,8 @@ defmodule Logflare.EndpointsTest do assert {:ok, %_{query: stored_sql, source_mapping: mapping}} = Endpoints.create_query(user, %{ name: "fully-qualified", - query: "select @test from `myproject.mydataset.mytable`" + query: "select @test from `myproject.mydataset.mytable`", + language: :bq_sql }) assert mapping == %{} @@ -143,7 +147,9 @@ defmodule Logflare.EndpointsTest do user = insert(:user) insert(:source, user: user, name: "c") query_string = "select current_datetime() as testing" - assert {:ok, %{rows: [%{"testing" => _}]}} = Endpoints.run_query_string(user, query_string) + + assert {:ok, %{rows: [%{"testing" => _}]}} = + Endpoints.run_query_string(user, {:bq_sql, query_string}) end test "run_cached_query/1" do @@ -202,10 +208,8 @@ defmodule Logflare.EndpointsTest do cfg = Application.get_env(:logflare, Logflare.Repo) - url = - "postgresql://#{cfg[:username]}:#{cfg[:password]}@#{cfg[:hostname]}/#{cfg[:database]}" + url = "postgresql://#{cfg[:username]}:#{cfg[:password]}@#{cfg[:hostname]}/#{cfg[:database]}" - config = %{"url" => url} user = insert(:user) source = insert(:source, user: user, name: "c") @@ -230,18 +234,18 @@ defmodule Logflare.EndpointsTest do test "run an endpoint query without caching", %{source: source, user: user} do query = "select body from #{source.name}" - endpoint = insert(:endpoint, user: user, query: query) + endpoint = insert(:endpoint, user: user, query: query, language: :pg_sql) assert {:ok, %{rows: []}} = Endpoints.run_query(endpoint) end test "run_query_string/3", %{source: source, user: user} do query = "select body from #{source.name}" - assert {:ok, %{rows: []}} = Endpoints.run_query_string(user, query) + assert {:ok, %{rows: []}} = Endpoints.run_query_string(user, {:pg_sql, query}) end test "run_cached_query/1", %{source: source, user: user} do query = "select body from #{source.name}" - endpoint = insert(:endpoint, user: user, query: query) + endpoint = insert(:endpoint, user: user, query: query, language: :pg_sql) assert {:ok, %{rows: []}} = Endpoints.run_cached_query(endpoint) end end diff --git a/test/support/factory.ex b/test/support/factory.ex index a25c53876..c379fb778 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -178,6 +178,7 @@ defmodule Logflare.Factory do user: build(:user), token: Ecto.UUID.generate(), query: "select current_date() as date", + language: :bq_sql, name: TestUtils.random_string() } end From bd97907b330a1a513b7d07aab4f8d4a63849c219 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 16:53:16 +0800 Subject: [PATCH 51/62] chore: fix failing endpoints tests, compilation warning --- lib/logflare/endpoints/query.ex | 1 - 
lib/logflare/single_tenant.ex | 3 +++ .../controllers/api/endpoint_controller_test.exs | 6 +++--- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/logflare/endpoints/query.ex b/lib/logflare/endpoints/query.ex index ea2e03320..d28cc72de 100644 --- a/lib/logflare/endpoints/query.ex +++ b/lib/logflare/endpoints/query.ex @@ -5,7 +5,6 @@ defmodule Logflare.Endpoints.Query do require Logger alias Logflare.Endpoints.Query - alias Logflare.Sources @derive {Jason.Encoder, only: [ diff --git a/lib/logflare/single_tenant.ex b/lib/logflare/single_tenant.ex index cec0890c1..b1dd71313 100644 --- a/lib/logflare/single_tenant.ex +++ b/lib/logflare/single_tenant.ex @@ -57,6 +57,7 @@ defmodule Logflare.SingleTenant do Application.app_dir(:logflare, "priv/supabase/endpoints/logs.all.sql") |> File.read!(), sandboxable: true, max_limit: 1000, + language: :bq_sql, enable_auth: true, cache_duration_seconds: 0 }, @@ -67,6 +68,7 @@ defmodule Logflare.SingleTenant do |> File.read!(), sandboxable: true, max_limit: 1000, + language: :bq_sql, enable_auth: true, cache_duration_seconds: 900, proactive_requerying_seconds: 300 @@ -78,6 +80,7 @@ defmodule Logflare.SingleTenant do |> File.read!(), sandboxable: true, max_limit: 1000, + language: :bq_sql, enable_auth: true, cache_duration_seconds: 900, proactive_requerying_seconds: 300 diff --git a/test/logflare_web/controllers/api/endpoint_controller_test.exs b/test/logflare_web/controllers/api/endpoint_controller_test.exs index 626d7d1e8..587860168 100644 --- a/test/logflare_web/controllers/api/endpoint_controller_test.exs +++ b/test/logflare_web/controllers/api/endpoint_controller_test.exs @@ -67,7 +67,7 @@ defmodule LogflareWeb.Api.EndpointControllerTest do response = conn |> add_access_token(user, ~w(private)) - |> post("/api/endpoints", %{name: name, query: "select a from logs"}) + |> post("/api/endpoints", %{name: name, language: "bq_sql", query: "select a from logs"}) |> json_response(201) assert response["name"] == name @@ -80,7 +80,7 @@ defmodule LogflareWeb.Api.EndpointControllerTest do |> post("/api/endpoints") |> json_response(422) - assert resp == %{"errors" => %{"name" => ["can't be blank"], "query" => ["can't be blank"]}} + assert %{"errors" => %{"name" => _, "query" => _, "language" => _}} = resp end test "returns 422 on bad arguments", %{conn: conn, user: user} do @@ -90,7 +90,7 @@ defmodule LogflareWeb.Api.EndpointControllerTest do |> post("/api/endpoints", %{name: 123}) |> json_response(422) - assert resp == %{"errors" => %{"name" => ["is invalid"], "query" => ["can't be blank"]}} + assert %{"errors" => %{"name" => _, "query" => _, "language" => _}} = resp end end From bd25472cc8212bc59b28e648035aa4624ec713fb Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 16:56:06 +0800 Subject: [PATCH 52/62] chore: fix failing endpoints ui tests --- assets/js/interfaces/EndpointEditor.jsx | 1 + test/logflare_web/live_views/endpoints_live_test.exs | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/assets/js/interfaces/EndpointEditor.jsx b/assets/js/interfaces/EndpointEditor.jsx index d0b3fce79..81d869914 100644 --- a/assets/js/interfaces/EndpointEditor.jsx +++ b/assets/js/interfaces/EndpointEditor.jsx @@ -15,6 +15,7 @@ const EndpointEditor = ({ const [queryParams, setQueryParams] = useState({ name: endpoint?.name || defaultValues?.name || "", query: endpoint?.query || "", + language: "bq_sql" }) const [testParams, setTestParams] = useState({}) const handleSubmit = (e) => { diff --git 
a/test/logflare_web/live_views/endpoints_live_test.exs b/test/logflare_web/live_views/endpoints_live_test.exs index 6e7f2dc0e..fb8577989 100644 --- a/test/logflare_web/live_views/endpoints_live_test.exs +++ b/test/logflare_web/live_views/endpoints_live_test.exs @@ -83,7 +83,8 @@ defmodule LogflareWeb.EndpointsLiveTest do render_hook(view, "save-endpoint", %{ endpoint: %{ name: "some query", - query: new_query + query: new_query, + language: "bq_sql" } }) From dfa2ffb2490f3deba4c447b437237407439a02ba Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 16:56:54 +0800 Subject: [PATCH 53/62] feat: add in postgres parser --- lib/logflare/sql_v2.ex | 47 ++-- lib/logflare/sql_v2/parser.ex | 6 +- native/sqlparser_ex/src/lib.rs | 40 ++- test/logflare/sql_test.exs | 459 +++++++++++++++++---------------- 4 files changed, 291 insertions(+), 261 deletions(-) diff --git a/lib/logflare/sql_v2.ex b/lib/logflare/sql_v2.ex index 9962fd30d..294be8341 100644 --- a/lib/logflare/sql_v2.ex +++ b/lib/logflare/sql_v2.ex @@ -29,14 +29,15 @@ defmodule Logflare.SqlV2 do {:ok, "..."} """ @typep input :: String.t() | {String.t(), String.t()} - @spec transform(atom(), input(), User.t() | pos_integer()) :: {:ok, String.t()} - def transform(backend, input, user_id) when is_integer(user_id) do + @typep language :: :pg_sql | :bq_sql + @spec transform(language(), input(), User.t() | pos_integer()) :: {:ok, String.t()} + def transform(lang, input, user_id) when is_integer(user_id) do user = Logflare.Users.get(user_id) - transform(backend, input, user) + transform(lang, input, user) end def transform(:pg_sql, input, user) do - {:ok, [input]} = Parser.parse(input) + {:ok, [input]} = Parser.parse("postgres", input) sources = Sources.list_sources_by_user(user) source_mapping = source_mapping(sources) @@ -71,7 +72,7 @@ defmodule Logflare.SqlV2 do sources = Sources.list_sources_by_user(user) source_mapping = source_mapping(sources) - with {:ok, statements} <- Parser.parse(query), + with {:ok, statements} <- Parser.parse("bigquery", query), data = %{ logflare_project_id: Application.get_env(:logflare, Logflare.Google)[:project_id], user_project_id: user_project_id, @@ -82,7 +83,8 @@ defmodule Logflare.SqlV2 do source_names: Map.keys(source_mapping), sandboxed_query: sandboxed_query, sandboxed_query_ast: nil, - ast: statements + ast: statements, + dialect: "bigquery" }, :ok <- validate_query(statements, data), {:ok, sandboxed_query_ast} <- sandboxed_ast(data), @@ -95,14 +97,18 @@ defmodule Logflare.SqlV2 do end end - defp sandboxed_ast(%{sandboxed_query: q}) when is_binary(q), do: Parser.parse(q) + defp sandboxed_ast(%{sandboxed_query: q, dialect: dialect}) when is_binary(q), + do: Parser.parse(dialect, q) + defp sandboxed_ast(_), do: {:ok, nil} @doc """ Performs a check if a query contains a CTE. 
returns true if it is, returns false if not """ - def contains_cte?(query) do - with {:ok, ast} <- Parser.parse(query), + def contains_cte?(query, opts \\ []) do + opts = Enum.into(opts, %{dialect: "bigquery"}) + + with {:ok, ast} <- Parser.parse(opts.dialect, query), [_ | _] <- extract_cte_alises(ast) do true else @@ -438,7 +444,9 @@ defmodule Logflare.SqlV2 do {:ok, %{"my_table" => "abced-weqqwe-..."}} """ @spec sources(String.t(), User.t()) :: {:ok, %{String.t() => String.t()}} | {:error, String.t()} - def sources(query, user) do + def sources(query, user, opts \\ []) do + opts = Enum.into(opts, %{dialect: "bigquery"}) + sources = Sources.list_sources_by_user(user) source_names = for s <- sources, do: s.name @@ -448,7 +456,7 @@ defmodule Logflare.SqlV2 do end sources = - with {:ok, ast} <- Parser.parse(query), + with {:ok, ast} <- Parser.parse(opts.dialect, query), names <- ast |> find_all_source_names() @@ -519,14 +527,17 @@ defmodule Logflare.SqlV2 do iex> source_mapping("select a from old_table_name", %{"old_table_name"=> "abcde-fg123-..."}, %User{}) {:ok, "select a from new_table_name"} """ - def source_mapping(query, %Logflare.User{id: user_id}, mapping) do - source_mapping(query, user_id, mapping) + def source_mapping(query, user, mapping, opts \\ []) + + def source_mapping(query, %Logflare.User{id: user_id}, mapping, opts) do + source_mapping(query, user_id, mapping, opts) end - def source_mapping(query, user_id, mapping) do + def source_mapping(query, user_id, mapping, opts) do + opts = Enum.into(opts, %{dialect: "bigquery"}) sources = Sources.list_sources_by_user(user_id) - with {:ok, ast} <- Parser.parse(query) do + with {:ok, ast} <- Parser.parse(opts.dialect, query) do ast |> replace_old_source_names(%{ sources: sources, @@ -598,8 +609,10 @@ defmodule Logflare.SqlV2 do iex> parameters(query) {:ok, ["something"]} """ - def parameters(query) do - with {:ok, ast} <- Parser.parse(query) do + def parameters(query, opts \\ []) do + opts = Enum.into(opts, %{dialect: "bigquery"}) + + with {:ok, ast} <- Parser.parse(opts.dialect, query) do {:ok, extract_all_parameters(ast)} end end diff --git a/lib/logflare/sql_v2/parser.ex b/lib/logflare/sql_v2/parser.ex index cdf1e1bdc..f980b5c77 100644 --- a/lib/logflare/sql_v2/parser.ex +++ b/lib/logflare/sql_v2/parser.ex @@ -3,12 +3,12 @@ defmodule Logflare.SqlV2.Parser do use Rustler, otp_app: :logflare, crate: "sqlparser_ex" # When your NIF is loaded, it will override this function. - def parse(_query), do: :erlang.nif_error(:nif_not_loaded) + def parse(_dialect, _query), do: :erlang.nif_error(:nif_not_loaded) def to_string(_query), do: :erlang.nif_error(:nif_not_loaded) end - def parse(query) do - with {:ok, json} <- Native.parse(query) do + def parse(dialect, query) when dialect in ["postgres", "bigquery"] do + with {:ok, json} <- Native.parse(dialect, query) do Jason.decode(json) end end diff --git a/native/sqlparser_ex/src/lib.rs b/native/sqlparser_ex/src/lib.rs index 6d9a2b478..95d69f595 100644 --- a/native/sqlparser_ex/src/lib.rs +++ b/native/sqlparser_ex/src/lib.rs @@ -1,8 +1,10 @@ -use rustler::NifTuple; -use rustler::NifResult; use rustler::Atom; +use rustler::NifResult; +use rustler::NifTuple; use sqlparser::dialect::BigQueryDialect; +use sqlparser::dialect::PostgreSqlDialect; use sqlparser::parser::Parser; +use sqlparser::parser::ParserError::ParserError; mod atoms { rustler::atoms! 
{ @@ -11,7 +13,6 @@ mod atoms { } } - #[derive(NifTuple)] struct Response { status: Atom, @@ -19,28 +20,39 @@ struct Response { } #[rustler::nif] -fn parse(query: &str) -> NifResult { - let dialect = BigQueryDialect {}; // or AnsiDialect, or your own dialect ... - let result = Parser::parse_sql(&dialect, query); +fn parse(dialect_str: &str, query: &str) -> NifResult { + let result = match dialect_str { + "bigquery" => Parser::parse_sql(&BigQueryDialect {}, query), + "postgres" => Parser::parse_sql(&PostgreSqlDialect {}, query), + _ => Err(ParserError( + "Parser for this dialect is not supported.".to_string(), + )), + }; match result { - Ok(v) => Ok(Response{status: atoms::ok(), message: serde_json::to_string(&v).unwrap()}), - Err(v) => Ok(Response{status: atoms::error(), message: v.to_string()}), + Ok(v) => Ok(Response { + status: atoms::ok(), + message: serde_json::to_string(&v).unwrap(), + }), + Err(v) => Ok(Response { + status: atoms::error(), + message: v.to_string(), + }), } } - #[rustler::nif] fn to_string(json: &str) -> NifResult { let nodes: Vec = serde_json::from_str(json).unwrap(); - + let mut parts = vec![]; for node in nodes { - parts.push( - format!("{}", node) - ) + parts.push(format!("{}", node)) } - return Ok(Response{status: atoms::ok(), message: parts.join("\n")}); + return Ok(Response { + status: atoms::ok(), + message: parts.join("\n"), + }); } rustler::init!("Elixir.Logflare.SqlV2.Parser.Native", [parse, to_string]); diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs index 1709047f9..d06a09902 100644 --- a/test/logflare/sql_test.exs +++ b/test/logflare/sql_test.exs @@ -19,243 +19,248 @@ defmodule Logflare.SqlTest do end) end - test "parser can handle complex sql" do - user = insert(:user) + describe "bigquery dialect" do + test "parser can handle complex sql" do + user = insert(:user) - for input <- [ - "select d[0]", - "select d[offset(0)]" - ] do - assert {:ok, _v2} = SqlV2.transform(:bigquery, input, user) + for input <- [ + "select d[0]", + "select d[offset(0)]" + ] do + assert {:ok, _v2} = SqlV2.transform(:bq_sql, input, user) + end end - end - test "non-BYOB - transforms table names correctly" do - user = insert(:user) - source = insert(:source, user: user, name: "my_table") - source_dots = insert(:source, user: user, name: "my.table.name") - source_other = insert(:source, user: user, name: "other_table") - table = bq_table_name(source) - table_other = bq_table_name(source_other) - table_dots = bq_table_name(source_dots) - - for {input, expected} <- [ - # quoted - {"select val from `my_table` where `my_table`.val > 5", - "select val from #{table} where #{table}.val > 5"}, - # source names with dots - {"select val from `my.table.name` where `my.table.name`.val > 5", - "select val from #{table_dots} where #{table_dots}.val > 5"}, - # joins - {"select a from my_table join other_table as f on a = 123", - "select a from #{table} join #{table_other} as f on a = 123"}, - # cross join + unnest with no join condition - {"select a from my_table cross join unnest(my_table.col) as f", - "select a from #{table} cross join unnest(#{table}.col) as f"}, - # inner join + unnest with join condition - {"select a from my_table join unnest(my_table.col) on true", - "select a from #{table} join unnest(#{table}.col) on true"}, - # where - {"select val from my_table where my_table.val > 5", - "select val from #{table} where #{table}.val > 5"}, - # select named column - {"select val, my_table.abc from my_table", "select val, #{table}.abc from #{table}"}, - # group by - 
{"select val from my_table group by my_table.val", - "select val from #{table} group by #{table}.val"}, - # order by - {"select val from my_table order by my_table.val", - "select val from #{table} order by #{table}.val"}, - # CTE - {"with src as (select n from `my_table`) select n from src", - "with src as (select n from #{table}) select n from src"}, - # having - {"select val from my_table group by my_table.abc having count(my_table.id) > 5", - "select val from #{table} group by #{table}.abc having count(#{table}.id) > 5"}, - # alias - {"select a from my_table as src where src.b > 5", - "select a from #{table} as src where src.b > 5"}, - # joins - {"select a from my_table left join other_table on my_table.d = other_table.e", - "select a from #{table} left join #{table_other} on #{table}.d = #{table_other}.e"}, - # CTE with union - {"with abc as (select val from my_table where val > 5) select val from abc union select a from other_table", - "with abc as (select val from #{table} where val > 5) select val from abc union select a from #{table_other}"}, - # recursive CTE - {"with src as (select a from my_table union select a from src) select a from src", - "with src as (select a from #{table} union select a from src) select a from src"}, - # CTE referencing - { - "with src as (select a from my_table), abc as (select b from src) select c from abc union select a from src", - "with src as (select a from #{table}), abc as (select b from src) select c from abc union select a from src" - }, - # sandboxed queries - { - {"with src as (select a from my_table), src2 as (select a from src where a > 5) select c from src", - "select a, b, c from src2"}, - "with src as (select a from #{table}), src2 as (select a from src where a > 5) select a, b, c from src2" - }, - # sandboxed queries with order by - { - {"with src as (select a from my_table) select c from src", - "select c from src order by c asc"}, - "with src as (select a from #{table}) select c from src order by c asc" - } - ] do - assert {:ok, v2} = SqlV2.transform(:bigquery, input, user) - assert String.downcase(v2) == expected - assert {:ok, v2} = SqlV2.transform(:bigquery, input, user.id) - assert String.downcase(v2) == expected + test "non-BYOB - transforms table names correctly" do + user = insert(:user) + source = insert(:source, user: user, name: "my_table") + source_dots = insert(:source, user: user, name: "my.table.name") + source_other = insert(:source, user: user, name: "other_table") + table = bq_table_name(source) + table_other = bq_table_name(source_other) + table_dots = bq_table_name(source_dots) + + for {input, expected} <- [ + # quoted + {"select val from `my_table` where `my_table`.val > 5", + "select val from #{table} where #{table}.val > 5"}, + # source names with dots + {"select val from `my.table.name` where `my.table.name`.val > 5", + "select val from #{table_dots} where #{table_dots}.val > 5"}, + # joins + {"select a from my_table join other_table as f on a = 123", + "select a from #{table} join #{table_other} as f on a = 123"}, + # cross join + unnest with no join condition + {"select a from my_table cross join unnest(my_table.col) as f", + "select a from #{table} cross join unnest(#{table}.col) as f"}, + # inner join + unnest with join condition + {"select a from my_table join unnest(my_table.col) on true", + "select a from #{table} join unnest(#{table}.col) on true"}, + # where + {"select val from my_table where my_table.val > 5", + "select val from #{table} where #{table}.val > 5"}, + # select named column + {"select val, 
my_table.abc from my_table", "select val, #{table}.abc from #{table}"}, + # group by + {"select val from my_table group by my_table.val", + "select val from #{table} group by #{table}.val"}, + # order by + {"select val from my_table order by my_table.val", + "select val from #{table} order by #{table}.val"}, + # CTE + {"with src as (select n from `my_table`) select n from src", + "with src as (select n from #{table}) select n from src"}, + # having + {"select val from my_table group by my_table.abc having count(my_table.id) > 5", + "select val from #{table} group by #{table}.abc having count(#{table}.id) > 5"}, + # alias + {"select a from my_table as src where src.b > 5", + "select a from #{table} as src where src.b > 5"}, + # joins + {"select a from my_table left join other_table on my_table.d = other_table.e", + "select a from #{table} left join #{table_other} on #{table}.d = #{table_other}.e"}, + # CTE with union + {"with abc as (select val from my_table where val > 5) select val from abc union select a from other_table", + "with abc as (select val from #{table} where val > 5) select val from abc union select a from #{table_other}"}, + # recursive CTE + {"with src as (select a from my_table union select a from src) select a from src", + "with src as (select a from #{table} union select a from src) select a from src"}, + # CTE referencing + { + "with src as (select a from my_table), abc as (select b from src) select c from abc union select a from src", + "with src as (select a from #{table}), abc as (select b from src) select c from abc union select a from src" + }, + # sandboxed queries + { + {"with src as (select a from my_table), src2 as (select a from src where a > 5) select c from src", + "select a, b, c from src2"}, + "with src as (select a from #{table}), src2 as (select a from src where a > 5) select a, b, c from src2" + }, + # sandboxed queries with order by + { + {"with src as (select a from my_table) select c from src", + "select c from src order by c asc"}, + "with src as (select a from #{table}) select c from src order by c asc" + } + ] do + assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user) + assert String.downcase(v2) == expected + assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user.id) + assert String.downcase(v2) == expected + end + + # queries where v1 differs from v2, don't test for equality + for {input, expected} <- [ + # subquery + {"select a from (select b from my_table)", "select a from (select b from #{table})"} + ] do + assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user) + assert String.downcase(v2) == expected + end end - # queries where v1 differs from v2, don't test for equality - for {input, expected} <- [ - # subquery - {"select a from (select b from my_table)", "select a from (select b from #{table})"} - ] do - assert {:ok, v2} = SqlV2.transform(:bigquery, input, user) - assert String.downcase(v2) == expected - end - end - - test "non-BYOB invalid queries" do - user = insert(:user) - insert(:source, user: user, name: "my_table") - insert(:source, user: user, name: "other_table") - - # invalid queries - for {input, expected} <- [ - # select-into queries are not parsed. 
- {"SELECT a FROM a INTO b", "end of statement"}, - { - {"with src as (select a from my_table) select c from src", - "select a from src into src"}, - "end of statement" - }, - # block no wildcard select - {"select * from a", "restricted wildcard"}, - {"SELECT a.* FROM a", "restricted wildcard"}, - {"SELECT q, a.* FROM a", "restricted wildcard"}, - {"SELECT a FROM (SELECT * FROM a)", "restricted wildcard"}, - {"WITH q AS (SELECT a FROM a) SELECT * FROM q", "restricted wildcard"}, - {"SELECT a FROM a UNION ALL SELECT * FROM b", "restricted wildcard"}, - { - {"with src as (select a from my_table) select c from src", "select * from src"}, - "restricted wildcard" - }, - # sandbox: restricted table references not in CTE - { - {"with src as (select a from my_table) select c from src", "select a from my_table"}, - "Table not found in CTE: (my_table)" - }, - # sandbox: restricted functions - {"SELECT SESSION_USER()", "Restricted function session_user"}, - {"SELECT EXTERNAL_QUERY('','')", "Restricted function external_query"}, - { - {"with src as (select a from my_table) select c from src", "select session_user()"}, - "Restricted function session_user" - }, - { - {"with src as (select a from my_table) select c from src", - "select external_query('','')"}, - "Restricted function external_query" - }, - # block DML - # https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax - { - "insert a (x,y) values ('test', 5)", - "Only SELECT queries allowed" - }, - { - "update a set x = 1", - "Only SELECT queries allowed" - }, - { - "delete from a where x = 1", - "Only SELECT queries allowed" - }, - { - "truncate table a", - "Only SELECT queries allowed" - }, - { - "MERGE t USING s ON t.product = s.product - WHEN MATCHED THEN - UPDATE SET quantity = t.quantity + s.quantity - WHEN NOT MATCHED THEN - INSERT (product, quantity) VALUES(product, quantity) ", - "Only SELECT queries allowed" - }, - { - "drop table a", - "Only SELECT queries allowed" - }, - {{"with src as (select a from my_table) select c from src", "update a set x=2"}, - "Only SELECT queries allowed"}, - {{"with src as (select a from my_table) select c from src", "drop table a"}, - "Only SELECT queries allowed"}, - # Block multiple queries - { - "select a from b; select c from d;", - "Only singular query allowed" - }, - {{"with src as (select a from my_table) select c from src", - "select a from b; select c from d;"}, "Only singular query allowed"}, - # no source name in query - {"select datetime() from light-two-os-directions-test", - "can't find source light-two-os-directions-test"}, - {"select datetime() from `light-two-os-directions-test`", - "can't find source light-two-os-directions-test"}, - {"with src as (select a from unknown_table) select datetime() from my_table", - "can't find source unknown_table"}, - # cannot query logflare project - {"select a from `#{@logflare_project_id}.mydataset.mytable`", "can't find source"}, - # fully qualified name that is not a source name should be rejected - {"select a from `a.b.c`", "can't find source"} - ] do - assert {:error, err} = SqlV2.transform(:bigquery, input, user) - - assert String.downcase(err) =~ String.downcase(expected), - "should error with '#{expected}'. input: #{inspect(input)}" + test "non-BYOB invalid queries" do + user = insert(:user) + insert(:source, user: user, name: "my_table") + insert(:source, user: user, name: "other_table") + + # invalid queries + for {input, expected} <- [ + # select-into queries are not parsed. 
+ {"SELECT a FROM a INTO b", "end of statement"}, + { + {"with src as (select a from my_table) select c from src", + "select a from src into src"}, + "end of statement" + }, + # block no wildcard select + {"select * from a", "restricted wildcard"}, + {"SELECT a.* FROM a", "restricted wildcard"}, + {"SELECT q, a.* FROM a", "restricted wildcard"}, + {"SELECT a FROM (SELECT * FROM a)", "restricted wildcard"}, + {"WITH q AS (SELECT a FROM a) SELECT * FROM q", "restricted wildcard"}, + {"SELECT a FROM a UNION ALL SELECT * FROM b", "restricted wildcard"}, + { + {"with src as (select a from my_table) select c from src", "select * from src"}, + "restricted wildcard" + }, + # sandbox: restricted table references not in CTE + { + {"with src as (select a from my_table) select c from src", + "select a from my_table"}, + "Table not found in CTE: (my_table)" + }, + # sandbox: restricted functions + {"SELECT SESSION_USER()", "Restricted function session_user"}, + {"SELECT EXTERNAL_QUERY('','')", "Restricted function external_query"}, + { + {"with src as (select a from my_table) select c from src", "select session_user()"}, + "Restricted function session_user" + }, + { + {"with src as (select a from my_table) select c from src", + "select external_query('','')"}, + "Restricted function external_query" + }, + # block DML + # https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax + { + "insert a (x,y) values ('test', 5)", + "Only SELECT queries allowed" + }, + { + "update a set x = 1", + "Only SELECT queries allowed" + }, + { + "delete from a where x = 1", + "Only SELECT queries allowed" + }, + { + "truncate table a", + "Only SELECT queries allowed" + }, + { + "MERGE t USING s ON t.product = s.product + WHEN MATCHED THEN + UPDATE SET quantity = t.quantity + s.quantity + WHEN NOT MATCHED THEN + INSERT (product, quantity) VALUES(product, quantity) ", + "Only SELECT queries allowed" + }, + { + "drop table a", + "Only SELECT queries allowed" + }, + {{"with src as (select a from my_table) select c from src", "update a set x=2"}, + "Only SELECT queries allowed"}, + {{"with src as (select a from my_table) select c from src", "drop table a"}, + "Only SELECT queries allowed"}, + # Block multiple queries + { + "select a from b; select c from d;", + "Only singular query allowed" + }, + {{"with src as (select a from my_table) select c from src", + "select a from b; select c from d;"}, "Only singular query allowed"}, + # no source name in query + {"select datetime() from light-two-os-directions-test", + "can't find source light-two-os-directions-test"}, + {"select datetime() from `light-two-os-directions-test`", + "can't find source light-two-os-directions-test"}, + {"with src as (select a from unknown_table) select datetime() from my_table", + "can't find source unknown_table"}, + # cannot query logflare project + {"select a from `#{@logflare_project_id}.mydataset.mytable`", "can't find source"}, + # fully qualified name that is not a source name should be rejected + {"select a from `a.b.c`", "can't find source"} + ] do + assert {:error, err} = SqlV2.transform(:bq_sql, input, user) + + assert String.downcase(err) =~ String.downcase(expected), + "should error with '#{expected}'. 
input: #{inspect(input)}" + end end end - test "BigQuery - fully qualified names" do - user = - insert(:user, bigquery_project_id: @user_project_id, bigquery_dataset_id: @user_dataset_id) - - source_abc = insert(:source, user: user, name: "a.b.c") - source_cxy = insert(:source, user: user, name: "c.x.y") - source_cxyz = insert(:source, user: user, name: "c.x.y.z") - - for {input, expected} <- [ - # fully qualified names must start with the user's bigquery project - {"select a from `#{@user_project_id}.#{@user_dataset_id}.mytable`", - "select a from `#{@user_project_id}.#{@user_dataset_id}.mytable`"}, - # source names that look like dataset format - {"select a from `a.b.c`", "select a from #{bq_table_name(source_abc)}"}, - {"with a as (select b from `c.x.y`) select b from a", - "with a as (select b from #{bq_table_name(source_cxy)}) select b from a"}, - {"with a as (select b from `c.x.y.z`) select b from a", - "with a as (select b from #{bq_table_name(source_cxyz)}) select b from a"} - ] do - assert SqlV2.transform(:bigquery, input, user) |> elem(1) |> String.downcase() == expected + describe "Bigquery fully qualified" do + test "able to use fully qualified names in queries" do + user = + insert(:user, bigquery_project_id: @user_project_id, bigquery_dataset_id: @user_dataset_id) + + source_abc = insert(:source, user: user, name: "a.b.c") + source_cxy = insert(:source, user: user, name: "c.x.y") + source_cxyz = insert(:source, user: user, name: "c.x.y.z") + + for {input, expected} <- [ + # fully qualified names must start with the user's bigquery project + {"select a from `#{@user_project_id}.#{@user_dataset_id}.mytable`", + "select a from `#{@user_project_id}.#{@user_dataset_id}.mytable`"}, + # source names that look like dataset format + {"select a from `a.b.c`", "select a from #{bq_table_name(source_abc)}"}, + {"with a as (select b from `c.x.y`) select b from a", + "with a as (select b from #{bq_table_name(source_cxy)}) select b from a"}, + {"with a as (select b from `c.x.y.z`) select b from a", + "with a as (select b from #{bq_table_name(source_cxyz)}) select b from a"} + ] do + assert SqlV2.transform(:bq_sql, input, user) |> elem(1) |> String.downcase() == expected + end end - end - # This test checks if a source name starting with a logflare project id will get transformed correctly to the source value - # this ensures users cannot access other users' sources. - test "source name replacement attack check - transform sources that have a fully-qualified name starting with global logflare project id" do - user = insert(:user) - source_name = "#{@logflare_project_id}.some.table" - insert(:source, user: user, name: source_name) - input = "select a from `#{source_name}`" + # This test checks if a source name starting with a logflare project id will get transformed correctly to the source value + # this ensures users cannot access other users' sources. 
+ test "source name replacement attack check - transform sources that have a fully-qualified name starting with global logflare project id" do + user = insert(:user) + source_name = "#{@logflare_project_id}.some.table" + insert(:source, user: user, name: source_name) + input = "select a from `#{source_name}`" - assert {:ok, transformed} = SqlV2.transform(:bigquery, input, user) - refute transformed =~ source_name + assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) + refute transformed =~ source_name + end end - describe "single tenant - fully qualified name check" do + describe "bigquery single tenant - fully qualified name check" do @single_tenant_bq_project_id "single-tenant-id" TestUtils.setup_single_tenant( seed_user: true, @@ -267,7 +272,7 @@ defmodule Logflare.SqlTest do insert(:source, user: user, name: "a.b.c") input = " select a from `a.b.c`" - assert {:ok, transformed} = SqlV2.transform(:bigquery, input, user) + assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) assert transformed =~ @single_tenant_bq_project_id refute transformed =~ @logflare_project_id end @@ -276,7 +281,7 @@ defmodule Logflare.SqlTest do user = SingleTenant.get_default_user() input = " select a from `#{@single_tenant_bq_project_id}.my_dataset.my_table`" - assert {:ok, transformed} = SqlV2.transform(:bigquery, input, user) + assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) assert transformed =~ "#{@single_tenant_bq_project_id}.my_dataset.my_table" refute transformed =~ @logflare_project_id end @@ -374,7 +379,7 @@ defmodule Logflare.SqlTest do expected = {:ok, "SELECT body, event_message, timestamp FROM #{PostgresAdaptor.table_name(source)}"} - assert SqlV2.transform(:postgres, input, user) == expected + assert SqlV2.transform(:pg_sql, input, user) == expected end end end From 355c3fa3d455c0e83fd19e206b2b91315fec65e7 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 18:43:56 +0800 Subject: [PATCH 54/62] chore: rename SqlV2 to Sql --- lib/logflare/endpoints.ex | 8 +++--- lib/logflare/endpoints/query.ex | 6 ++--- lib/logflare/{sql_v2.ex => sql.ex} | 4 +-- lib/logflare/{sql_v2 => sql}/parser.ex | 2 +- native/sqlparser_ex/src/lib.rs | 2 +- test/logflare/sql_test.exs | 34 +++++++++++++------------- 6 files changed, 28 insertions(+), 28 deletions(-) rename lib/logflare/{sql_v2.ex => sql.ex} (99%) rename lib/logflare/{sql_v2 => sql}/parser.ex (94%) diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index 357b9c826..5fe8cc698 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -81,7 +81,7 @@ defmodule Logflare.Endpoints do """ @spec create_sandboxed_query(User.t(), Query.t(), map()) :: {:ok, Query.t()} | {:error, :no_cte} def create_sandboxed_query(user, sandbox, attrs) do - case Logflare.SqlV2.contains_cte?(sandbox.query) do + case Logflare.Sql.contains_cte?(sandbox.query) do true -> user |> Ecto.build_assoc(:endpoint_queries, sandbox_query: sandbox) @@ -139,7 +139,7 @@ defmodule Logflare.Endpoints do """ @spec parse_query_string(String.t()) :: {:ok, %{parameters: [String.t()]}} | {:error, any()} def parse_query_string(query_string) do - with {:ok, declared_params} <- Logflare.SqlV2.parameters(query_string) do + with {:ok, declared_params} <- Logflare.Sql.parameters(query_string) do {:ok, %{parameters: declared_params}} end end @@ -155,9 +155,9 @@ defmodule Logflare.Endpoints do transform_input = if(sandboxable && sql_param, do: {query_string, sql_param}, else: query_string) - with {:ok, declared_params} <- 
Logflare.SqlV2.parameters(query_string), + with {:ok, declared_params} <- Logflare.Sql.parameters(query_string), {:ok, transformed_query} <- - Logflare.SqlV2.transform(endpoint_query.language, transform_input, user_id), + Logflare.Sql.transform(endpoint_query.language, transform_input, user_id), {:ok, result} <- exec_query_on_backend(endpoint_query, transformed_query, declared_params, params) do {:ok, result} diff --git a/lib/logflare/endpoints/query.ex b/lib/logflare/endpoints/query.ex index d28cc72de..95f31db20 100644 --- a/lib/logflare/endpoints/query.ex +++ b/lib/logflare/endpoints/query.ex @@ -103,7 +103,7 @@ defmodule Logflare.Endpoints.Query do language = Ecto.Changeset.get_field(changeset, :language, :bq_sql) validate_change(changeset, field, fn field, value -> - case Logflare.SqlV2.transform(language, value, get_field(changeset, :user)) do + case Logflare.Sql.transform(language, value, get_field(changeset, :user)) do {:ok, _} -> [] {:error, error} -> [{field, error}] end @@ -113,7 +113,7 @@ defmodule Logflare.Endpoints.Query do # Only update source mapping if there are no errors defp update_source_mapping(%{errors: [], changes: %{query: query}} = changeset) when is_binary(query) do - case Logflare.SqlV2.sources(query, get_field(changeset, :user)) do + case Logflare.Sql.sources(query, get_field(changeset, :user)) do {:ok, source_mapping} -> put_change(changeset, :source_mapping, source_mapping) {:error, error} -> add_error(changeset, :query, error) end @@ -128,7 +128,7 @@ defmodule Logflare.Endpoints.Query do def map_query_sources( %__MODULE__{query: query, source_mapping: source_mapping, user_id: user_id} = q ) do - case Logflare.SqlV2.source_mapping(query, user_id, source_mapping) do + case Logflare.Sql.source_mapping(query, user_id, source_mapping) do {:ok, query} -> Map.put(q, :query, query) diff --git a/lib/logflare/sql_v2.ex b/lib/logflare/sql.ex similarity index 99% rename from lib/logflare/sql_v2.ex rename to lib/logflare/sql.ex index 294be8341..50f87d05d 100644 --- a/lib/logflare/sql_v2.ex +++ b/lib/logflare/sql.ex @@ -1,4 +1,4 @@ -defmodule Logflare.SqlV2 do +defmodule Logflare.Sql do @moduledoc """ SQL parsing and transformation based on open source parser. 
@@ -7,7 +7,7 @@ defmodule Logflare.SqlV2 do alias Logflare.Sources alias Logflare.User alias Logflare.SingleTenant - alias Logflare.SqlV2.Parser + alias Logflare.Sql.Parser alias Logflare.Backends.Adaptor.PostgresAdaptor.PgRepo @doc """ diff --git a/lib/logflare/sql_v2/parser.ex b/lib/logflare/sql/parser.ex similarity index 94% rename from lib/logflare/sql_v2/parser.ex rename to lib/logflare/sql/parser.ex index f980b5c77..351631f8d 100644 --- a/lib/logflare/sql_v2/parser.ex +++ b/lib/logflare/sql/parser.ex @@ -1,4 +1,4 @@ -defmodule Logflare.SqlV2.Parser do +defmodule Logflare.Sql.Parser do defmodule Native do use Rustler, otp_app: :logflare, crate: "sqlparser_ex" diff --git a/native/sqlparser_ex/src/lib.rs b/native/sqlparser_ex/src/lib.rs index 95d69f595..2492de122 100644 --- a/native/sqlparser_ex/src/lib.rs +++ b/native/sqlparser_ex/src/lib.rs @@ -55,4 +55,4 @@ fn to_string(json: &str) -> NifResult { }); } -rustler::init!("Elixir.Logflare.SqlV2.Parser.Native", [parse, to_string]); +rustler::init!("Elixir.Logflare.Sql.Parser.Native", [parse, to_string]); diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs index d06a09902..24bb465a3 100644 --- a/test/logflare/sql_test.exs +++ b/test/logflare/sql_test.exs @@ -2,7 +2,7 @@ defmodule Logflare.SqlTest do @moduledoc false use Logflare.DataCase alias Logflare.SingleTenant - alias Logflare.SqlV2 + alias Logflare.Sql alias Logflare.Backends.Adaptor.PostgresAdaptor @logflare_project_id "logflare-project-id" @user_project_id "user-project-id" @@ -27,7 +27,7 @@ defmodule Logflare.SqlTest do "select d[0]", "select d[offset(0)]" ] do - assert {:ok, _v2} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, _v2} = Sql.transform(:bq_sql, input, user) end end @@ -103,9 +103,9 @@ defmodule Logflare.SqlTest do "with src as (select a from #{table}) select c from src order by c asc" } ] do - assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, v2} = Sql.transform(:bq_sql, input, user) assert String.downcase(v2) == expected - assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user.id) + assert {:ok, v2} = Sql.transform(:bq_sql, input, user.id) assert String.downcase(v2) == expected end @@ -114,7 +114,7 @@ defmodule Logflare.SqlTest do # subquery {"select a from (select b from my_table)", "select a from (select b from #{table})"} ] do - assert {:ok, v2} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, v2} = Sql.transform(:bq_sql, input, user) assert String.downcase(v2) == expected end end @@ -215,7 +215,7 @@ defmodule Logflare.SqlTest do # fully qualified name that is not a source name should be rejected {"select a from `a.b.c`", "can't find source"} ] do - assert {:error, err} = SqlV2.transform(:bq_sql, input, user) + assert {:error, err} = Sql.transform(:bq_sql, input, user) assert String.downcase(err) =~ String.downcase(expected), "should error with '#{expected}'. 
input: #{inspect(input)}" @@ -243,7 +243,7 @@ defmodule Logflare.SqlTest do {"with a as (select b from `c.x.y.z`) select b from a", "with a as (select b from #{bq_table_name(source_cxyz)}) select b from a"} ] do - assert SqlV2.transform(:bq_sql, input, user) |> elem(1) |> String.downcase() == expected + assert Sql.transform(:bq_sql, input, user) |> elem(1) |> String.downcase() == expected end end @@ -255,7 +255,7 @@ defmodule Logflare.SqlTest do insert(:source, user: user, name: source_name) input = "select a from `#{source_name}`" - assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, transformed} = Sql.transform(:bq_sql, input, user) refute transformed =~ source_name end end @@ -272,7 +272,7 @@ defmodule Logflare.SqlTest do insert(:source, user: user, name: "a.b.c") input = " select a from `a.b.c`" - assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, transformed} = Sql.transform(:bq_sql, input, user) assert transformed =~ @single_tenant_bq_project_id refute transformed =~ @logflare_project_id end @@ -281,7 +281,7 @@ defmodule Logflare.SqlTest do user = SingleTenant.get_default_user() input = " select a from `#{@single_tenant_bq_project_id}.my_dataset.my_table`" - assert {:ok, transformed} = SqlV2.transform(:bq_sql, input, user) + assert {:ok, transformed} = Sql.transform(:bq_sql, input, user) assert transformed =~ "#{@single_tenant_bq_project_id}.my_dataset.my_table" refute transformed =~ @logflare_project_id end @@ -293,7 +293,7 @@ defmodule Logflare.SqlTest do other_source = insert(:source, user: user, name: "other.table") input = "select a from my_table" expected = %{"my_table" => Atom.to_string(source.token)} - assert {:ok, ^expected} = SqlV2.sources(input, user) + assert {:ok, ^expected} = Sql.sources(input, user) input = "select a from my_table, `other.table`" @@ -302,7 +302,7 @@ defmodule Logflare.SqlTest do "other.table" => Atom.to_string(other_source.token) } - assert {:ok, ^expected} = SqlV2.sources(input, user) + assert {:ok, ^expected} = Sql.sources(input, user) end test "source_mapping/3 updates an SQL string with renamed sources" do @@ -319,9 +319,9 @@ defmodule Logflare.SqlTest do Ecto.Changeset.change(source, name: "new") |> Logflare.Repo.update() - assert {:ok, output} = SqlV2.source_mapping(input, user.id, mapping) + assert {:ok, output} = Sql.source_mapping(input, user.id, mapping) assert String.downcase(output) == expected - assert {:ok, output} = SqlV2.source_mapping(input, user, mapping) + assert {:ok, output} = Sql.source_mapping(input, user, mapping) assert String.downcase(output) == expected end @@ -342,8 +342,8 @@ defmodule Logflare.SqlTest do {"with q as (select old.a from old where char_length(@c)) select 1", ["c"]}, {"with q as (select @c from old) select 1", ["c"]} ] do - assert {:ok, ^output} = SqlV2.parameters(input) - assert {:ok, ^output} = SqlV2.parameters(input) + assert {:ok, ^output} = Sql.parameters(input) + assert {:ok, ^output} = Sql.parameters(input) end end @@ -379,7 +379,7 @@ defmodule Logflare.SqlTest do expected = {:ok, "SELECT body, event_message, timestamp FROM #{PostgresAdaptor.table_name(source)}"} - assert SqlV2.transform(:pg_sql, input, user) == expected + assert Sql.transform(:pg_sql, input, user) == expected end end end From 997ee114e5f047e79360ca0e9ab1c0b6da11c0fe Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 18:44:41 +0800 Subject: [PATCH 55/62] chore: fix compilation warnings --- lib/logflare/endpoints.ex | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index 5fe8cc698..55c7bc67c 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -1,6 +1,5 @@ defmodule Logflare.Endpoints do @moduledoc false - alias Ecto.Adapters.SQL alias Logflare.Endpoints.Cache alias Logflare.Endpoints.Query alias Logflare.Endpoints.Resolver @@ -9,7 +8,6 @@ defmodule Logflare.Endpoints do alias Logflare.Users alias Logflare.Utils alias Logflare.Backends - alias Logflare.Backends.SourceBackend alias Logflare.Backends.Adaptor.PostgresAdaptor import Ecto.Query From 1a3d3776cd35a6839a785375050823b6f53d8727 Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Fri, 14 Jul 2023 19:50:11 +0800 Subject: [PATCH 56/62] chore: remove queryable?/0 --- lib/logflare/backends/adaptor.ex | 18 ++---------------- .../backends/adaptor/postgres_adaptor.ex | 2 -- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/lib/logflare/backends/adaptor.ex b/lib/logflare/backends/adaptor.ex index d2eb927f5..cab6b4d40 100644 --- a/lib/logflare/backends/adaptor.ex +++ b/lib/logflare/backends/adaptor.ex @@ -12,16 +12,11 @@ defmodule Logflare.Backends.Adaptor do """ @callback ingest(identifier(), [LogEvent.t()]) :: :ok - @doc """ - Checks if the adaptor can execute queries - """ - @callback queryable? :: boolean() - @doc """ Queries the backend using an endpoint query. """ - @typep query :: Query.t() | Ecto.Query.t() | String.t() - @callback execute_query(identifier(), query()) :: {:ok, [term()]} | {:error, :not_queryable} + @typep query :: Query.t() | Ecto.Query.t() | String.t() | {String.t(), [term()]} + @callback execute_query(identifier(), query()) :: {:ok, [term()]} @doc """ Typecasts config params. @@ -40,15 +35,6 @@ defmodule Logflare.Backends.Adaptor do @impl true def queryable?(), do: false - @impl true - def execute_query(_pid, _query) do - if function_exported?(__MODULE__, :queryable, 0) do - raise "queryable?/0 callback implemented but query execution callback has not been implemented yet!" 
-        else
-          {:error, :not_queryable}
-        end
-      end
-
       @impl true
       def ingest(_pid, _log_events), do: raise("Ingest callback not implemented!")
 
diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex
index 768438cbc..0ddc799d3 100644
--- a/lib/logflare/backends/adaptor/postgres_adaptor.ex
+++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex
@@ -71,8 +71,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do
     |> Ecto.Changeset.validate_format(:url, ~r/postgresql?\:\/\/.+/)
   end
 
-  @impl true
-  def queryable?(), do: true
 
   @impl true
   def execute_query(%SourceBackend{} = source_backend, %Ecto.Query{} = query) do

From a95d682b826bc5cbd4a413c72e34bf7141f1b51a Mon Sep 17 00:00:00 2001
From: TzeYiing
Date: Fri, 14 Jul 2023 19:50:48 +0800
Subject: [PATCH 57/62] feat: add query parameterization

---
 lib/logflare/backends/adaptor/postgres_adaptor.ex | 10 ++++++----
 test/logflare/backends/postgres_adaptor_test.exs  | 13 +++++++++++++
 2 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex
index 0ddc799d3..39acc26b3 100644
--- a/lib/logflare/backends/adaptor/postgres_adaptor.ex
+++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex
@@ -71,7 +71,6 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do
     |> Ecto.Changeset.validate_format(:url, ~r/postgresql?\:\/\/.+/)
   end
 
-  @impl true
   def execute_query(%SourceBackend{} = source_backend, %Ecto.Query{} = query) do
     mod = create_repo(source_backend)
@@ -80,11 +79,14 @@
     {:ok, result}
   end
 
-  def execute_query(%SourceBackend{} = source_backend, query_string)
-      when is_binary(query_string) do
+  def execute_query(%SourceBackend{} = source_backend, query_string) when is_binary(query_string),
+    do: execute_query(source_backend, {query_string, []})
+
+  def execute_query(%SourceBackend{} = source_backend, {query_string, params})
+      when is_binary(query_string) and is_list(params) do
     mod = create_repo(source_backend)
     :ok = connect_to_repo(source_backend)
-    result = Ecto.Adapters.SQL.query!(mod, query_string)
+    result = Ecto.Adapters.SQL.query!(mod, query_string, params)
 
     rows =
       for row <- result.rows do
diff --git a/test/logflare/backends/postgres_adaptor_test.exs b/test/logflare/backends/postgres_adaptor_test.exs
index 77399a940..5823e675e 100644
--- a/test/logflare/backends/postgres_adaptor_test.exs
+++ b/test/logflare/backends/postgres_adaptor_test.exs
@@ -73,6 +73,19 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptorTest do
                  source_backend,
                  "select body from #{PostgresAdaptor.table_name(source_backend)}"
                )
+
+      # query by string with parameter
+      assert {:ok,
+              [
+                %{
+                  "value" => "data"
+                }
+              ]} =
+               PostgresAdaptor.execute_query(
+                 source_backend,
+                 {"select body ->> $1 as value from #{PostgresAdaptor.table_name(source_backend)}",
+                  ["test"]}
+               )
     end
   end

From 201112a53c9dc10305b151c872c423923b46dcbb Mon Sep 17 00:00:00 2001
From: TzeYiing
Date: Fri, 14 Jul 2023 21:13:10 +0800
Subject: [PATCH 58/62] docs: add documentation to PostgresAdaptor.execute_query/2

---
 lib/logflare/backends/adaptor/postgres_adaptor.ex | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/lib/logflare/backends/adaptor/postgres_adaptor.ex b/lib/logflare/backends/adaptor/postgres_adaptor.ex
index 39acc26b3..25c926eca 100644
--- a/lib/logflare/backends/adaptor/postgres_adaptor.ex
+++ b/lib/logflare/backends/adaptor/postgres_adaptor.ex
@@ -71,6 +71,20 @@ defmodule Logflare.Backends.Adaptor.PostgresAdaptor do
     |> Ecto.Changeset.validate_format(:url, ~r/postgresql?\:\/\/.+/)
   end
 
+  @doc """
+  Executes either an Ecto.Query or a SQL string on the Postgres backend.
+
+  If a SQL string is provided, parameters can also be passed along with it.
+  Parameter placeholders should follow the Postgres format, i.e. `$1`, `$2`, etc.
+
+  ### Examples
+    iex> execute_query(source_backend, from(s in "log_event_..."))
+    {:ok, [%{...}]}
+    iex> execute_query(source_backend, "select body from log_event_table")
+    {:ok, [%{...}]}
+    iex> execute_query(source_backend, {"select $1 as c from log_event_table", ["value"]})
+    {:ok, [%{...}]}
+  """
   @impl true
   def execute_query(%SourceBackend{} = source_backend, %Ecto.Query{} = query) do
     mod = create_repo(source_backend)

From e835a85809198378a585dedbdb5a99612f391144 Mon Sep 17 00:00:00 2001
From: TzeYiing
Date: Mon, 17 Jul 2023 19:37:32 +0800
Subject: [PATCH 59/62] chore: fix compilation warnings

---
 lib/logflare/backends.ex         | 2 +-
 lib/logflare/backends/adaptor.ex | 7 +++----
 lib/logflare/endpoints.ex        | 4 ++--
 3 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/lib/logflare/backends.ex b/lib/logflare/backends.ex
index 0074f0e02..2e81a58e8 100644
--- a/lib/logflare/backends.ex
+++ b/lib/logflare/backends.ex
@@ -34,7 +34,7 @@ defmodule Logflare.Backends do
   """
   @spec list_source_backends_by_user_id(integer()) :: [SourceBackend.t()]
   def list_source_backends_by_user_id(id) when is_integer(id) do
-    from(sb in SourceBackend, join: s in Source, where: s.user_id == ^id)
+    from(sb in SourceBackend, join: s in Source, on: true, where: s.user_id == ^id)
     |> Repo.all()
     |> Enum.map(fn sb -> typecast_config_string_map_to_atom_map(sb) end)
   end
diff --git a/lib/logflare/backends/adaptor.ex b/lib/logflare/backends/adaptor.ex
index cab6b4d40..9242b22d3 100644
--- a/lib/logflare/backends/adaptor.ex
+++ b/lib/logflare/backends/adaptor.ex
@@ -16,7 +16,7 @@ defmodule Logflare.Backends.Adaptor do
   Queries the backend using an endpoint query.
   """
   @typep query :: Query.t() | Ecto.Query.t() | String.t() | {String.t(), [term()]}
-  @callback execute_query(identifier(), query()) :: {:ok, [term()]}
+  @callback execute_query(identifier(), query()) :: {:ok, [term()]} | {:error, :not_implemented}
 
   @doc """
   Typecasts config params. 
@@ -32,9 +32,6 @@ defmodule Logflare.Backends.Adaptor do quote do @behaviour Logflare.Backends.Adaptor - @impl true - def queryable?(), do: false - @impl true def ingest(_pid, _log_events), do: raise("Ingest callback not implemented!") @@ -42,6 +39,8 @@ defmodule Logflare.Backends.Adaptor do def validate_config(_config_changeset), do: raise("Config validation callback not implemented!") + @impl true + def execute_query(_identifier, _query), do: {:error, :not_implemented} @impl true def cast_config(_config), do: raise("Config casting callback not implemented!") diff --git a/lib/logflare/endpoints.ex b/lib/logflare/endpoints.ex index 55c7bc67c..011a47c39 100644 --- a/lib/logflare/endpoints.ex +++ b/lib/logflare/endpoints.ex @@ -207,8 +207,8 @@ defmodule Logflare.Endpoints do defp exec_query_on_backend( %Query{language: :pg_sql} = endpoint_query, transformed_query, - declared_params, - params + _declared_params, + _params ) do # find compatible source backend # TODO: move this to Backends module From 1cc455054f50e5e86225b9eee3d34f93881026fb Mon Sep 17 00:00:00 2001 From: TzeYiing Date: Mon, 17 Jul 2023 20:03:23 +0800 Subject: [PATCH 60/62] chore: fix flaky test --- test/logflare/sql_test.exs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/logflare/sql_test.exs b/test/logflare/sql_test.exs index 24bb465a3..f1cfe37dc 100644 --- a/test/logflare/sql_test.exs +++ b/test/logflare/sql_test.exs @@ -366,7 +366,7 @@ defmodule Logflare.SqlTest do describe "transform/3 for :postgres backends" do setup do user = insert(:user) - source = insert(:source, user: user, name: "source_#{TestUtils.random_string()}") + source = insert(:source, user: user, name: "source_a") %{user: user, source: source} end From e5445a122aeccfa63ae80441f5bb7101a6b56d0f Mon Sep 17 00:00:00 2001 From: Chase Granberry Date: Mon, 17 Jul 2023 06:59:27 -0700 Subject: [PATCH 61/62] fix: Broadcast buffers only every 5 seconds for now --- lib/logflare/source/bigquery/buffer_counter.ex | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/logflare/source/bigquery/buffer_counter.ex b/lib/logflare/source/bigquery/buffer_counter.ex index 7a84e245e..da0e148e9 100644 --- a/lib/logflare/source/bigquery/buffer_counter.ex +++ b/lib/logflare/source/bigquery/buffer_counter.ex @@ -11,7 +11,7 @@ defmodule Logflare.Source.BigQuery.BufferCounter do require Logger - @broadcast_every 1_000 + @broadcast_every 5_000 @max_buffer_len 5_000 def start_link(%RLS{source_id: source_id}) when is_atom(source_id) do From 0b8fc790b274e0ca2b400c03364024c5e38a4ac6 Mon Sep 17 00:00:00 2001 From: Chase Granberry Date: Mon, 17 Jul 2023 07:00:37 -0700 Subject: [PATCH 62/62] fix: bump version --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 672b322cd..32b1dc78d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.3.18 \ No newline at end of file +1.3.19 \ No newline at end of file
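
Taken together, PATCH 54 (the Logflare.Sql rename) and PATCHes 57-58 (the
parameterized PostgresAdaptor.execute_query/2) enable the flow sketched below.
This is an illustrative sketch only, not part of the patch series: `user` is
assumed to be a %Logflare.User{} (or user id) owning a source named
"my_source", and `source_backend` a %SourceBackend{} for that source
configured with a postgresql:// URL.

    alias Logflare.Backends.Adaptor.PostgresAdaptor
    alias Logflare.Sql

    # Map source names in the user-facing query to physical table names for
    # the Postgres dialect, as Logflare.Endpoints does before execution.
    {:ok, transformed} = Sql.transform(:pg_sql, "select body from my_source", user)

    # A bare SQL string is normalized to the {sql, params} tuple form with an
    # empty parameter list, so both calls below go through the same code path.
    {:ok, _rows} = PostgresAdaptor.execute_query(source_backend, transformed)

    # Postgres-style $1 placeholders are bound from the params list.
    {:ok, _rows} =
      PostgresAdaptor.execute_query(
        source_backend,
        {"select body ->> $1 as value from #{PostgresAdaptor.table_name(source_backend)}",
         ["test"]}
      )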