diff --git a/crates/bestool/src/actions/tamanu/alerts.rs b/crates/bestool/src/actions/tamanu/alerts.rs
index 3232ee5..f34e881 100644
--- a/crates/bestool/src/actions/tamanu/alerts.rs
+++ b/crates/bestool/src/actions/tamanu/alerts.rs
@@ -1,1321 +1,10 @@
-use std::{
- collections::HashMap,
- convert::Infallible,
- env::current_dir,
- error::Error,
- fmt::Display,
- io::Write,
- ops::ControlFlow,
- path::{Path, PathBuf},
- process,
- sync::Arc,
- time::Duration,
-};
+pub use command::*;
-use bytes::{BufMut, BytesMut};
-use chrono::{DateTime, Utc};
-use clap::Parser;
-use folktime::duration::{Duration as Folktime, Style as FolkStyle};
-use futures::{future::join_all, TryFutureExt};
-use html2md_rs::to_md::safe_from_html_to_md;
-use mailgun_rs::{EmailAddress, Mailgun, Message};
-use miette::{miette, Context as _, IntoDiagnostic, Result};
-use reqwest::Url;
-use serde_json::json;
-use sysinfo::System;
-use tera::{Context as TeraCtx, Tera};
-use tokio::{io::AsyncReadExt as _, task::JoinSet, time::timeout};
-use tokio_postgres::types::{IsNull, ToSql, Type};
-use tracing::{debug, error, info, instrument, warn};
-use walkdir::WalkDir;
-
-use crate::{actions::Context, postgres_to_value::rows_to_value_map};
-
-use super::{config::load_config, find_package, find_tamanu, TamanuArgs};
-
-const DEFAULT_SUBJECT_TEMPLATE: &str = "[Tamanu Alert] {{ filename }} ({{ hostname }})";
-
-/// Execute alert definitions against Tamanu.
-///
-/// An alert definition is a YAML file that describes a single alert
-/// source and targets to send triggered alerts to.
-///
-/// This tool reads both database and email credentials from Tamanu's own
-/// configuration files, see the tamanu subcommand help (one level above) for
-/// more on how that's determined.
-///
-/// # Example
-///
-/// ```yaml
-/// sql: |
-/// SELECT * FROM fhir.jobs
-/// WHERE error IS NOT NULL
-/// AND created_at > $1
-///
-/// send:
-/// - target: email:
-/// addresses: [alerts@tamanu.io]
-/// subject: "FHIR job errors ({{ hostname }})"
-/// template: |
-/// Automated alert! There have been {{ rows | length }} FHIR jobs
-/// with errors in the past {{ interval }}. Here are the first 5:
-/// {% for row in rows | first(5) %}
-/// - {{ row.topic }}: {{ row.error }}
-/// {% endfor %}
-/// ```
-///
-/// # Template variables
-///
-/// Templates are rendered with [Tera](https://keats.github.io/tera/docs/#templates).
-///
-/// - `rows`: the result of the SQL query, as a list of objects (if source = sql)
-/// - `output`: the result of the shell command (if source = shell)
-/// - `interval`: the duration string of the alert interval
-/// - `hostname`: the hostname of the machine running this command
-/// - `filename`: the name of the alert definition file
-/// - `now`: the current date and time
-///
-/// Additionally you can `{% include "subject" %}` to include the rendering of
-/// the subject template in the email template.
-///
-/// # Sources
-///
-/// Each alert must have one source that it executes to determine whether the
-/// alert is triggered or not. Current sources: `sql`, `shell`.
-///
-/// ## SQL
-///
-/// This source executes a SQL query, which can have a binding for a datetime in
-/// the past to limit results by; returning any rows indicates an alert trigger.
-///
-/// ```yaml
-/// sql: |
-/// SELECT 1 + 1
-/// ```
-///
-/// ### Query binding parameters
-///
-/// The SQL query will be passed exactly the number of parameters it expects.
-/// The parameters are always provided in this order:
-///
-/// - `$1`: the datetime of the start of the interval (timestamp with time zone)
-/// - `$2`: the interval duration (interval)
-///
-/// ## Shell
-///
-/// This source executes a shell script. Returning a non-zero exit code
-/// indicates an alert trigger. The stdout of the script will be the `output`
-/// template variable.
-///
-/// ```yaml
-/// shell: bash
-/// run: |
-/// echo foo
-/// exit 1
-/// ```
-///
-/// # Send targets
-///
-/// You can send triggered alerts to one or more different targets. Current send
-/// targets are: `email`, `slack`, `zendesk`. Note that you can have multiple
-/// targets of the same type.
-///
-/// ## Email
-///
-/// ```yaml
-/// send:
-/// - target: email
-/// addresses:
-/// - staff@job.com
-/// - support@job.com
-/// ```
-///
-/// ## Slack
-///
-/// ```yaml
-/// send:
-/// - target: slack
-/// webhook: https://hooks.slack.com/services/...
-/// template: |
-/// _Alert!_ There are {{ rows | length }} rows with errors.
-/// ```
-///
-/// You can customise the payload sent to Slack by specifying fields:
-///
-/// ```yaml
-/// send:
-/// - target: slack
-/// webhook: https://hooks.slack.com/services/...
-/// # ...
-/// fields:
-/// - name: alertname
-/// field: filename # this will be replaced with the filename of the alert
-/// - name: deployment
-/// value: production # this will be the exact value 'production'
-/// ```
-///
-/// The default set of fields is:
-///
-/// ```yaml
-/// - name: hostname
-/// field: hostname
-/// - name: filename
-/// field: filename
-/// - name: subject
-/// field: subject
-/// - name: message
-/// field: body
-/// ```
-///
-/// Overriding the `fields` will replace the default set entirely (so you may
-/// want to include all the ones you're not changing).
-///
-/// ## Zendesk (authenticated)
-///
-/// ```yaml
-/// send:
-/// - target: zendesk
-/// endpoint: https://example.zendesk.com/api/v2/requests
-/// credentials:
-/// email: foo@example.com
-/// password: pass
-/// ticket_form_id: 500
-/// custom_fields:
-/// - id: 100
-/// value: tamanu_
-/// - id: 200
-/// value: Test
-/// ```
-///
-/// ## Zendesk (anonymous)
-///
-/// ```yaml
-/// send:
-/// - target: zendesk
-/// endpoint: https://example.zendesk.com/api/v2/requests
-/// requester: Name of requester
-/// ticket_form_id: 500
-/// custom_fields:
-/// - id: 100
-/// value: tamanu_
-/// - id: 200
-/// value: Test
-/// ```
-///
-/// ## External targets
-///
-/// It can be tedious to specify and update the same addresses in many different
-/// alert files, especially for more complex send targets. You can create a
-/// `_targets.yml` file in any of the `--dir`s (if there are multiple such files
-/// they will be merged).
-///
-/// ```yaml
-/// targets:
-/// - id: email-staff
-/// target: email
-/// addresses:
-/// - staff@job.com
-/// - id: zendesk-normal
-/// target: zendesk
-/// endpoint: https://...
-/// credentials:
-/// email: the@bear.com
-/// password: ichooseyou
-/// ```
-///
-/// The `subject` and `template` fields are omitted in the `_targets.yml`.
-///
-/// Then in the alerts file, specify `external` targets, with the relevant `id`s
-/// and the `subject` and `template`:
-///
-/// ```yaml
-/// send:
-/// - target: external
-/// id: email-staff
-/// subject: [Alert] Something is wrong
-/// template: |
-///
-///       Whoops
-/// ```
-///
-/// If you specify multiple external targets with the same `id`, the alert will be
-/// multiplexed (i.e. sent to all targets with that `id`). This can be useful for
-/// sending alerts to both email and slack, or for debugging by temporarily sending
-/// alerts to an additional target.
-///
-/// ---
-/// As this documentation is a bit hard to read in the terminal, you may want to
-/// consult the online version:
-///
-#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu alerts`"))]
-#[derive(Debug, Clone, Parser)]
-#[clap(verbatim_doc_comment)]
-pub struct AlertsArgs {
- /// Folder containing alert definitions.
- ///
- /// This folder will be read recursively for files with the `.yaml` or `.yml` extension.
- ///
- /// Files that don't match the expected format will be skipped, as will files with
- /// `enabled: false` at the top level. Syntax errors will be reported for YAML files.
- ///
- /// It's entirely valid to provide a folder that only contains a `_targets.yml` file.
- ///
- /// Can be provided multiple times. Defaults to (depending on platform): `C:\Tamanu\alerts`,
- /// `C:\Tamanu\{current-version}\alerts`, `/opt/tamanu-toolbox/alerts`, `/etc/tamanu/alerts`,
- /// `/alerts`, and `./alerts`.
- #[arg(long)]
- pub dir: Vec,
-
- /// How far back to look for alerts.
- ///
- /// This is a duration string, e.g. `1d` for one day, `1h` for one hour, etc. It should match
- /// the task scheduling / cron interval for this command.
- #[arg(long, default_value = "15m")]
- pub interval: humantime::Duration,
-
- /// Timeout for each alert.
- ///
- /// If an alert takes longer than this to query the database or run the shell script, it will be
- /// skipped. Defaults to 30 seconds.
- ///
- /// This is a duration string, e.g. `1d` for one day, `1h` for one hour, etc.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `--interval DURATION`"))]
- #[arg(long, default_value = "30s")]
- pub timeout: humantime::Duration,
-
- /// Don't actually send alerts, just print them to stdout.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `--dry-run`"))]
- #[arg(long)]
- pub dry_run: bool,
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct TamanuConfig {
- db: TamanuDb,
- mailgun: TamanuMailgun,
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct TamanuDb {
- host: Option,
- name: String,
- username: String,
- password: String,
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct TamanuMailgun {
- domain: String,
- #[serde(rename = "apiKey")]
- api_key: String,
- #[serde(rename = "from")]
- sender: String,
-}
-
-fn enabled() -> bool {
- true
-}
-
-#[derive(serde::Deserialize, Debug, Default)]
-struct AlertDefinition {
- #[serde(default, skip)]
- file: PathBuf,
-
- #[serde(default = "enabled")]
- enabled: bool,
- #[serde(skip)]
- interval: Duration,
- #[serde(default)]
- send: Vec,
-
- #[serde(flatten)]
- source: TicketSource,
-
- // legacy email-only fields
- #[serde(default)]
- recipients: Vec,
- subject: Option,
- template: Option,
-}
-
-#[derive(serde::Deserialize, Debug, Default)]
-#[serde(untagged, deny_unknown_fields)]
-enum TicketSource {
- Sql {
- sql: String,
- },
- Shell {
- shell: String,
- run: String,
- },
-
- #[default]
- None,
-}
-
-#[derive(serde::Deserialize, Debug, Clone)]
-#[serde(rename_all = "snake_case", tag = "target")]
-enum SendTarget {
- Email {
- subject: Option,
- template: String,
- #[serde(flatten)]
- conn: TargetEmail,
- },
- Zendesk {
- subject: Option,
- template: String,
- #[serde(flatten)]
- conn: TargetZendesk,
- },
- Slack {
- subject: Option,
- template: String,
- #[serde(flatten)]
- conn: TargetSlack,
- },
- External {
- subject: Option,
- template: String,
- id: String,
- },
-}
-
-impl SendTarget {
- fn resolve_external(
- &self,
- external_targets: &HashMap>,
- ) -> Option> {
- match self {
- Self::External {
- id,
- subject,
- template,
- } => external_targets.get(id).map(|exts| {
- exts.iter()
- .map(|ext| match ext {
- ExternalTarget::Email { conn, .. } => SendTarget::Email {
- subject: subject.clone(),
- template: template.clone(),
- conn: conn.clone(),
- },
- ExternalTarget::Zendesk { conn, .. } => SendTarget::Zendesk {
- subject: subject.clone(),
- template: template.clone(),
- conn: conn.clone(),
- },
- ExternalTarget::Slack { conn, .. } => SendTarget::Slack {
- subject: subject.clone(),
- template: template.clone(),
- conn: conn.clone(),
- },
- })
- .collect()
- }),
- _ => None,
- }
- }
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct AlertTargets {
- targets: Vec,
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(rename_all = "snake_case", tag = "target")]
-enum ExternalTarget {
- Email {
- id: String,
- #[serde(flatten)]
- conn: TargetEmail,
- },
- Zendesk {
- id: String,
- #[serde(flatten)]
- conn: TargetZendesk,
- },
- Slack {
- id: String,
- #[serde(flatten)]
- conn: TargetSlack,
- },
-}
-
-impl ExternalTarget {
- fn id(&self) -> &str {
- match self {
- Self::Email { id, .. } => id,
- Self::Zendesk { id, .. } => id,
- Self::Slack { id, .. } => id,
- }
- }
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(rename_all = "snake_case")]
-struct TargetEmail {
- addresses: Vec,
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(rename_all = "snake_case")]
-struct TargetZendesk {
- endpoint: Url,
-
- #[serde(flatten)]
- method: ZendeskMethod,
-
- ticket_form_id: Option,
- #[serde(default)]
- custom_fields: Vec,
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(rename_all = "snake_case")]
-struct TargetSlack {
- webhook: Url,
-
- #[serde(default = "SlackField::default_set")]
- fields: Vec,
-}
-
-#[derive(serde::Deserialize, Clone, Copy, Debug)]
-#[serde(rename_all = "snake_case")]
-enum TemplateField {
- Filename,
- Subject,
- Body,
- Hostname,
- Requester,
- Interval,
-}
-
-impl TemplateField {
- fn as_str(self) -> &'static str {
- match self {
- Self::Filename => "filename",
- Self::Subject => "subject",
- Self::Body => "body",
- Self::Hostname => "hostname",
- Self::Requester => "requester",
- Self::Interval => "interval",
- }
- }
-}
-
-impl Display for TemplateField {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "{}", self.as_str())
- }
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(untagged, rename_all = "snake_case")]
-enum SlackField {
- Fixed { name: String, value: String },
- Field { name: String, field: TemplateField },
-}
-
-impl SlackField {
- fn default_set() -> Vec {
- vec![
- Self::Field {
- name: "hostname".into(),
- field: TemplateField::Hostname,
- },
- Self::Field {
- name: "filename".into(),
- field: TemplateField::Filename,
- },
- Self::Field {
- name: "subject".into(),
- field: TemplateField::Subject,
- },
- Self::Field {
- name: "message".into(),
- field: TemplateField::Body,
- },
- ]
- }
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-#[serde(untagged, deny_unknown_fields)]
-enum ZendeskMethod {
- // Make credentials and requester fields exclusive as specifying the requester object in authorized
- // request is invalid. We may be able to specify some account as the requester, but it's not
- // necessary. That's because the requester defaults to the authenticated account.
- Authorized { credentials: ZendeskCredentials },
- Anonymous { requester: String },
-}
-
-#[derive(serde::Deserialize, Clone, Debug)]
-struct ZendeskCredentials {
- email: String,
- password: String,
-}
-
-#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
-struct ZendeskCustomField {
- id: u64,
- value: String,
-}
-
-impl AlertDefinition {
- fn normalise(mut self, external_targets: &HashMap>) -> Self {
- if !self.recipients.is_empty() {
- self.send.push(SendTarget::Email {
- subject: self.subject,
- template: self.template.unwrap_or_default(),
- conn: TargetEmail {
- addresses: self.recipients,
- },
- });
- self.recipients = vec![];
- self.subject = None;
- self.template = None;
- }
-
- self.send = self
- .send
- .iter()
- .flat_map(|target| match target {
- target @ SendTarget::External { id, .. } => target
- .resolve_external(external_targets)
- .unwrap_or_else(|| {
- error!(id, "external target not found");
- Vec::new()
- }),
- other => vec![other.clone()],
- })
- .collect();
-
- self
- }
-}
-
-struct InternalContext {
- pg_client: tokio_postgres::Client,
- http_client: reqwest::Client,
-}
-
-async fn default_dirs(root: &Path) -> Vec {
- let mut dirs = vec![
- PathBuf::from(r"C:\Tamanu\alerts"),
- root.join("alerts"),
- PathBuf::from("/opt/tamanu-toolbox/alerts"),
- PathBuf::from("/etc/tamanu/alerts"),
- PathBuf::from("/alerts"),
- ];
- if let Ok(cwd) = current_dir() {
- dirs.push(cwd.join("alerts"));
- }
-
- join_all(dirs.into_iter().map(|dir| async {
- if dir.exists() {
- Some(dir)
- } else {
- None
- }
- }))
- .await
- .into_iter()
- .flatten()
- .collect()
-}
-
-pub async fn run(ctx: Context) -> Result<()> {
- let (_, root) = find_tamanu(&ctx.args_top)?;
- let kind = find_package(&root);
- let config_value = load_config(&root, kind.package_name())?;
- let config: TamanuConfig = serde_json::from_value(config_value)
- .into_diagnostic()
- .wrap_err("parsing of Tamanu config failed")?;
- debug!(?config, "parsed Tamanu config");
-
- let dirs = if ctx.args_sub.dir.is_empty() {
- default_dirs(&root).await
- } else {
- ctx.args_sub.dir
- };
- debug!(?dirs, "searching for alerts");
-
- let mut alerts = Vec::::new();
- let mut external_targets = HashMap::new();
- for dir in dirs {
- let external_targets_path = dir.join("_targets.yml");
- if let Some(AlertTargets { targets }) = std::fs::read_to_string(&external_targets_path)
- .ok()
- .and_then(|content| {
- debug!(path=?external_targets_path, "parsing external targets");
- serde_yml::from_str::(&content)
- .map_err(
- |err| warn!(path=?external_targets_path, "_targets.yml has errors! {err}"),
- )
- .ok()
- }) {
- for target in targets {
- external_targets
- .entry(target.id().into())
- .or_insert(Vec::new())
- .push(target);
- }
- }
-
- alerts.extend(
- WalkDir::new(dir)
- .into_iter()
- .filter_map(|e| e.ok())
- .filter(|e| e.file_type().is_file())
- .map(|entry| {
- let file = entry.path();
-
- if !file
- .extension()
- .map_or(false, |e| e == "yaml" || e == "yml")
- {
- return Ok(None);
- }
-
- if file.file_stem().map_or(false, |n| n == "_targets") {
- return Ok(None);
- }
-
- debug!(?file, "parsing YAML file");
- let content = std::fs::read_to_string(file)
- .into_diagnostic()
- .wrap_err(format!("{file:?}"))?;
- let mut alert: AlertDefinition = serde_yml::from_str(&content)
- .into_diagnostic()
- .wrap_err(format!("{file:?}"))?;
-
- alert.file = file.to_path_buf();
- alert.interval = ctx.args_sub.interval.into();
- debug!(?alert, "parsed alert file");
- Ok(if alert.enabled { Some(alert) } else { None })
- })
- .filter_map(|def: Result>| match def {
- Err(err) => {
- error!("{err:?}");
- None
- }
- Ok(def) => def,
- }),
- );
- }
-
- if alerts.is_empty() {
- info!("no alerts found, doing nothing");
- return Ok(());
- }
-
- if !external_targets.is_empty() {
- debug!(count=%external_targets.len(), "found some external targets");
- }
-
- for alert in &mut alerts {
- *alert = std::mem::take(alert).normalise(&external_targets);
- }
- debug!(count=%alerts.len(), "found some alerts");
-
- let mut pg_config = tokio_postgres::Config::default();
- pg_config.application_name(&format!(
- "{}/{} (tamanu alerts)",
- env!("CARGO_PKG_NAME"),
- env!("CARGO_PKG_VERSION")
- ));
- if let Some(host) = &config.db.host {
- pg_config.host(host);
- } else {
- pg_config.host("localhost");
- }
- pg_config.user(&config.db.username);
- pg_config.password(&config.db.password);
- pg_config.dbname(&config.db.name);
- info!(config=?pg_config, "connecting to Tamanu database");
- let (client, connection) = pg_config
- .connect(tokio_postgres::NoTls)
- .await
- .into_diagnostic()?;
- tokio::spawn(async move {
- if let Err(e) = connection.await {
- eprintln!("connection error: {}", e);
- }
- });
-
- let mailgun = Arc::new(config.mailgun);
- let internal_ctx = Arc::new(InternalContext {
- pg_client: client,
- http_client: reqwest::Client::new(),
- });
-
- let mut set = JoinSet::new();
- for alert in alerts {
- let internal_ctx = internal_ctx.clone();
- let dry_run = ctx.args_sub.dry_run;
- let mailgun = mailgun.clone();
- let timeout_d: Duration = ctx.args_sub.timeout.into();
- let name = alert.file.clone();
- set.spawn(
- timeout(timeout_d, async move {
- let error = format!("while executing alert: {}", alert.file.display());
- if let Err(err) = execute_alert(internal_ctx, mailgun, alert, dry_run)
- .await
- .wrap_err(error)
- {
- eprintln!("{err:?}");
- }
- })
- .or_else(move |elapsed| async move {
- error!(alert=?name, "timeout: {elapsed:?}");
- Ok::<_, Infallible>(())
- }),
- );
- }
-
- while let Some(res) = set.join_next().await {
- match res {
- Err(err) => {
- error!("task: {err:?}");
- }
- _ => (),
- }
- }
-
- Ok(())
-}
-
-#[instrument]
-fn load_templates(target: &SendTarget) -> Result {
- let mut tera = tera::Tera::default();
-
- match target {
- SendTarget::Email {
- subject, template, ..
- }
- | SendTarget::Zendesk {
- subject, template, ..
- }
- | SendTarget::Slack {
- subject, template, ..
- }
- | SendTarget::External {
- subject, template, ..
- } => {
- tera.add_raw_template(
- TemplateField::Subject.as_str(),
- subject.as_deref().unwrap_or(DEFAULT_SUBJECT_TEMPLATE),
- )
- .into_diagnostic()
- .wrap_err("compiling subject template")?;
- tera.add_raw_template(TemplateField::Body.as_str(), &template)
- .into_diagnostic()
- .wrap_err("compiling body template")?;
- }
- }
-
- if let SendTarget::Zendesk {
- conn: TargetZendesk {
- method: ZendeskMethod::Anonymous { requester },
- ..
- },
- ..
- } = target
- {
- tera.add_raw_template(TemplateField::Requester.as_str(), requester)
- .into_diagnostic()
- .wrap_err("compiling requester template")?;
- }
- Ok(tera)
-}
-
-#[instrument(skip(alert, now))]
-fn build_context(alert: &AlertDefinition, now: chrono::DateTime) -> TeraCtx {
- let mut context = TeraCtx::new();
- context.insert(
- TemplateField::Interval.as_str(),
- &format!(
- "{}",
- Folktime::new(alert.interval).with_style(FolkStyle::OneUnitWhole)
- ),
- );
- context.insert(
- TemplateField::Hostname.as_str(),
- System::host_name().as_deref().unwrap_or("unknown"),
- );
- context.insert(
- TemplateField::Filename.as_str(),
- &alert.file.file_name().unwrap().to_string_lossy(),
- );
- context.insert("now", &now.to_string());
-
- context
-}
-
-#[instrument(skip(client, alert, not_before, context))]
-async fn read_sources(
- client: &tokio_postgres::Client,
- alert: &AlertDefinition,
- not_before: DateTime,
- context: &mut TeraCtx,
-) -> Result> {
- match &alert.source {
- TicketSource::None => {
- debug!(?alert.file, "no source, skipping");
- return Ok(ControlFlow::Break(()));
- }
- TicketSource::Sql { sql } => {
- let statement = client.prepare(sql).await.into_diagnostic()?;
-
- let interval = Interval(alert.interval);
- let all_params: Vec<&(dyn ToSql + Sync)> = vec![¬_before, &interval];
-
- let rows = client
- .query(&statement, &all_params[..statement.params().len()])
- .await
- .into_diagnostic()
- .wrap_err("querying database")?;
-
- if rows.is_empty() {
- debug!(?alert.file, "no rows returned, skipping");
- return Ok(ControlFlow::Break(()));
- }
- info!(?alert.file, rows=%rows.len(), "alert triggered");
-
- let context_rows = rows_to_value_map(&rows);
-
- context.insert("rows", &context_rows);
- }
- TicketSource::Shell { shell, run } => {
- let mut script = tempfile::Builder::new().tempfile().into_diagnostic()?;
- write!(script.as_file_mut(), "{run}").into_diagnostic()?;
-
- let mut shell = tokio::process::Command::new(shell)
- .arg(script.path())
- .stdin(process::Stdio::null())
- .stdout(process::Stdio::piped())
- .spawn()
- .into_diagnostic()?;
-
- let mut output = Vec::new();
- let mut stdout = shell
- .stdout
- .take()
- .ok_or_else(|| miette!("getting the child stdout handle"))?;
- let output_future =
- futures::future::try_join(shell.wait(), stdout.read_to_end(&mut output));
-
- let Ok(res) = tokio::time::timeout(alert.interval, output_future).await else {
- warn!(?alert.file, "the script timed out, skipping");
- shell.kill().await.into_diagnostic()?;
- return Ok(ControlFlow::Break(()));
- };
-
- let (status, output_size) = res.into_diagnostic().wrap_err("running the shell")?;
-
- if status.success() {
- debug!(?alert.file, "the script succeeded, skipping");
- return Ok(ControlFlow::Break(()));
- }
- info!(?alert.file, ?status, ?output_size, "alert triggered");
-
- context.insert("output", &String::from_utf8_lossy(&output));
- }
- }
- Ok(ControlFlow::Continue(()))
-}
-
-#[instrument(skip(tera, context))]
-fn render_alert(tera: &Tera, context: &mut TeraCtx) -> Result<(String, String, Option)> {
- let subject = tera
- .render(TemplateField::Subject.as_str(), &context)
- .into_diagnostic()
- .wrap_err("rendering subject template")?;
-
- context.insert(TemplateField::Subject.as_str(), &subject.to_string());
-
- let body = tera
- .render(TemplateField::Body.as_str(), &context)
- .into_diagnostic()
- .wrap_err("rendering email template")?;
-
- let requester = tera
- .render(TemplateField::Requester.as_str(), &context)
- .map(Some)
- .or_else(|err| match err.kind {
- tera::ErrorKind::TemplateNotFound(_) => Ok(None),
- _ => Err(err),
- })
- .into_diagnostic()
- .wrap_err("rendering requester template")?;
-
- Ok((subject, body, requester))
-}
-
-async fn execute_alert(
- ctx: Arc,
- mailgun: Arc,
- alert: AlertDefinition,
- dry_run: bool,
-) -> Result<()> {
- info!(?alert.file, "executing alert");
-
- let now = crate::now_time(&chrono::Utc);
- let not_before = now - alert.interval;
- info!(?now, ?not_before, interval=?alert.interval, "date range for alert");
-
- let mut tera_ctx = build_context(&alert, now);
- if read_sources(&ctx.pg_client, &alert, not_before, &mut tera_ctx)
- .await?
- .is_break()
- {
- return Ok(());
- }
-
- for target in &alert.send {
- let tera = load_templates(target)?;
- let (subject, body, requester) = render_alert(&tera, &mut tera_ctx)?;
-
- match target {
- SendTarget::Email {
- conn: TargetEmail { addresses },
- ..
- } => {
- if dry_run {
- println!("-------------------------------");
- println!("Alert: {}", alert.file.display());
- println!("Recipients: {}", addresses.join(", "));
- println!("Subject: {subject}");
- println!("Body: {body}");
- continue;
- }
-
- debug!(?alert.recipients, "sending email");
- let sender = EmailAddress::address(&mailgun.sender);
- let mailgun = Mailgun {
- api_key: mailgun.api_key.clone(),
- domain: mailgun.domain.clone(),
- };
- let message = Message {
- to: addresses
- .iter()
- .map(|email| EmailAddress::address(email))
- .collect(),
- subject,
- html: body,
- ..Default::default()
- };
- mailgun
- .async_send(mailgun_rs::MailgunRegion::US, &sender, message)
- .await
- .into_diagnostic()
- .wrap_err("sending email")?;
- }
-
- SendTarget::Slack {
- conn: TargetSlack { webhook, fields },
- ..
- } => {
- if dry_run {
- println!("-------------------------------");
- println!("Alert: {}", alert.file.display());
- println!("Recipients: slack");
- println!("Subject: {subject}");
- println!("Body: {body}");
- continue;
- }
-
- let payload: HashMap<&String, String> = fields
- .iter()
- .map(|field| match field {
- SlackField::Fixed { name, value } => (name, value.clone()),
- SlackField::Field { name, field } => (
- name,
- tera.render(field.as_str(), &tera_ctx)
- .ok()
- .or_else(|| {
- tera_ctx.get(field.as_str()).map(|v| {
- let v = match v.as_str() {
- Some(t) => t.to_owned(),
- None => v.to_string(),
- };
- safe_from_html_to_md(v.clone()).unwrap_or(v)
- })
- })
- .unwrap_or_default(),
- ),
- })
- .collect();
-
- debug!(?webhook, ?payload, "posting to slack webhook");
- ctx.http_client
- .post(webhook.clone())
- .json(&payload)
- .send()
- .await
- .into_diagnostic()
- .wrap_err("posting to slack webhook")?;
- }
-
- SendTarget::Zendesk {
- conn:
- TargetZendesk {
- endpoint,
- method,
- ticket_form_id,
- custom_fields,
- },
- ..
- } => {
- if dry_run {
- println!("-------------------------------");
- println!("Alert: {}", alert.file.display());
- println!("Endpoint: {}", endpoint);
- println!("Subject: {subject}");
- println!("Body: {body}");
- continue;
- }
-
- let req = json!({
- "request": {
- "subject": subject,
- "ticket_form_id": ticket_form_id,
- "custom_fields": custom_fields,
- "comment": { "html_body": body },
- "requester": requester.map(|r| json!({ "name": r }))
- }
- });
-
- let mut req_builder = ctx.http_client.post(endpoint.clone()).json(&req);
-
- if let ZendeskMethod::Authorized {
- credentials: ZendeskCredentials { email, password },
- } = method
- {
- req_builder =
- req_builder.basic_auth(std::format_args!("{email}/token"), Some(password));
- }
-
- req_builder
- .send()
- .await
- .into_diagnostic()
- .wrap_err("creating Zendesk ticket")?;
- debug!("Zendesk ticket sent");
- }
-
- SendTarget::External { .. } => {
- unreachable!("external targets should be resolved before here");
- }
- }
- }
-
- Ok(())
-}
-
-#[derive(Debug)]
-struct Interval(pub Duration);
-
-impl ToSql for Interval {
- fn to_sql(&self, _: &Type, out: &mut BytesMut) -> Result> {
- out.put_i64(self.0.as_micros().try_into().unwrap_or_default());
- out.put_i32(0);
- out.put_i32(0);
- Ok(IsNull::No)
- }
-
- fn accepts(ty: &Type) -> bool {
- matches!(*ty, Type::INTERVAL)
- }
-
- tokio_postgres::types::to_sql_checked!();
-}
+mod command;
+mod definition;
+mod pg_interval;
+mod targets;
+mod templates;
#[cfg(test)]
-mod tests {
- use chrono::{Duration, Utc};
-
- use super::*;
-
- fn interval_context(dur: Duration) -> Option {
- let alert = AlertDefinition {
- file: PathBuf::from("test.yaml"),
- enabled: true,
- interval: dur.to_std().unwrap(),
- source: TicketSource::Sql { sql: "".into() },
- send: vec![],
- recipients: vec![],
- subject: None,
- template: None,
- };
- build_context(&alert, Utc::now())
- .get("interval")
- .and_then(|v| v.as_str())
- .map(|s| s.to_owned())
- }
-
- #[test]
- fn test_interval_format_minutes() {
- assert_eq!(
- interval_context(Duration::minutes(15)).as_deref(),
- Some("15m"),
- );
- }
-
- #[test]
- fn test_interval_format_hour() {
- assert_eq!(interval_context(Duration::hours(1)).as_deref(), Some("1h"),);
- }
-
- #[test]
- fn test_interval_format_day() {
- assert_eq!(interval_context(Duration::days(1)).as_deref(), Some("1d"),);
- }
-
- #[test]
- fn test_alert_parse_email() {
- let alert = r#"
-sql: SELECT $1::timestamptz;
-send:
-- target: email
- addresses: [test@example.com]
- subject: "[Tamanu Alert] Example ({{ hostname }})"
- template: |
- Server: {{ hostname }}
- There are {{ rows | length }} rows.
-"#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- let alert = alert.normalise(&Default::default());
- assert_eq!(alert.interval, std::time::Duration::default());
- assert!(
- matches!(alert.source, TicketSource::Sql { sql } if sql == "SELECT $1::timestamptz;")
- );
- assert!(matches!(alert.send[0], SendTarget::Email { .. }));
- }
-
- #[test]
- fn test_alert_parse_shell() {
- let alert = r#"
-shell: bash
-run: echo foobar
-"#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- let alert = alert.normalise(&Default::default());
- assert_eq!(alert.interval, std::time::Duration::default());
- assert!(
- matches!(alert.source, TicketSource::Shell { shell, run } if shell == "bash" && run == "echo foobar")
- );
- }
-
- #[test]
- fn test_alert_parse_invalid_source() {
- let alert = r#"
-shell: bash
-"#;
- assert!(matches!(
- serde_yml::from_str::(&alert),
- Err(_)
- ));
- let alert = r#"
-run: echo foo
-"#;
- assert!(matches!(
- serde_yml::from_str::(&alert),
- Err(_)
- ));
- let alert = r#"
-sql: SELECT $1::timestamptz;
-run: echo foo
-"#;
- assert!(matches!(
- serde_yml::from_str::(&alert),
- Err(_)
- ));
- let alert = r#"
-sql: SELECT $1::timestamptz;
-shell: bash
-"#;
- assert!(matches!(
- serde_yml::from_str::(&alert),
- Err(_)
- ));
- let alert = r#"
-sql: SELECT $1::timestamptz;
-shell: bash
-run: echo foo
-"#;
- assert!(matches!(
- serde_yml::from_str::(&alert),
- Err(_)
- ));
- }
-
- #[test]
- fn test_alert_parse_zendesk_authorized() {
- let alert = r#"
-sql: SELECT $1::timestamptz;
-send:
-- target: zendesk
- endpoint: https://example.zendesk.com/api/v2/requests
- credentials:
- email: foo@example.com
- password: pass
- subject: "[Tamanu Alert] Example ({{ hostname }})"
- template: "Output: {{ output }}""#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
- }
-
- #[test]
- fn test_alert_parse_zendesk_anon() {
- let alert = r#"
-sql: SELECT $1::timestamptz;
-send:
-- target: zendesk
- endpoint: https://example.zendesk.com/api/v2/requests
- requester: "{{ hostname }}"
- subject: "[Tamanu Alert] Example ({{ hostname }})"
- template: "Output: {{ output }}""#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
- }
-
- #[test]
- fn test_alert_parse_zendesk_form_fields() {
- let alert = r#"
-sql: SELECT $1::timestamptz;
-send:
-- target: zendesk
- endpoint: https://example.zendesk.com/api/v2/requests
- requester: "{{ hostname }}"
- subject: "[Tamanu Alert] Example ({{ hostname }})"
- template: "Output: {{ output }}"
- ticket_form_id: 500
- custom_fields:
- - id: 100
- value: tamanu_
- - id: 200
- value: Test
-"#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
- }
-
- #[test]
- fn test_alert_parse_legacy_recipients() {
- let alert = r#"
-sql: |
- SELECT $1::timestamptz;
-recipients:
- - test@example.com
-subject: "[Tamanu Alert] Example ({{ hostname }})"
-template: |
- Server: {{ hostname }}
- There are {{ rows | length }} rows.
-"#;
- let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
- let alert = alert.normalise(&Default::default());
- assert_eq!(alert.interval, std::time::Duration::default());
- assert!(matches!(alert.send[0], SendTarget::Email { .. }));
- }
-}
+mod tests;
diff --git a/crates/bestool/src/actions/tamanu/alerts/command.rs b/crates/bestool/src/actions/tamanu/alerts/command.rs
new file mode 100644
index 0000000..1a18345
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/command.rs
@@ -0,0 +1,462 @@
+use std::{
+ collections::HashMap,
+ convert::Infallible,
+ env::current_dir,
+ path::{Path, PathBuf},
+ sync::Arc,
+ time::Duration,
+};
+
+use clap::Parser;
+use futures::{future::join_all, TryFutureExt};
+use miette::{Context as _, IntoDiagnostic, Result};
+use tokio::{task::JoinSet, time::timeout};
+use tracing::{debug, error, info, warn};
+use walkdir::WalkDir;
+
+use super::{definition::AlertDefinition, targets::AlertTargets};
+use crate::actions::{
+ tamanu::{config::load_config, find_tamanu, TamanuArgs},
+ Context,
+};
+
+/// Execute alert definitions against Tamanu.
+///
+/// An alert definition is a YAML file that describes a single alert
+/// source and targets to send triggered alerts to.
+///
+/// This tool reads both database and email credentials from Tamanu's own
+/// configuration files, see the tamanu subcommand help (one level above) for
+/// more on how that's determined.
+///
+/// # Example
+///
+/// ```yaml
+/// sql: |
+/// SELECT * FROM fhir.jobs
+/// WHERE error IS NOT NULL
+/// AND created_at > $1
+///
+/// send:
+/// - target: email
+/// addresses: [alerts@tamanu.io]
+/// subject: "FHIR job errors ({{ hostname }})"
+/// template: |
+/// Automated alert! There have been {{ rows | length }} FHIR jobs
+/// with errors in the past {{ interval }}. Here are the first 5:
+/// {% for row in rows | first(5) %}
+/// - {{ row.topic }}: {{ row.error }}
+/// {% endfor %}
+/// ```
+///
+/// # Template variables
+///
+/// Templates are rendered with [Tera](https://keats.github.io/tera/docs/#templates).
+///
+/// - `rows`: the result of the SQL query, as a list of objects (if source = sql)
+/// - `output`: the result of the shell command (if source = shell)
+/// - `interval`: the duration string of the alert interval
+/// - `hostname`: the hostname of the machine running this command
+/// - `filename`: the name of the alert definition file
+/// - `now`: the current date and time
+///
+/// Additionally you can `{% include "subject" %}` to include the rendering of
+/// the subject template in the email template.
+///
+/// # Sources
+///
+/// Each alert must have one source that it executes to determine whether the
+/// alert is triggered or not. Current sources: `sql`, `shell`.
+///
+/// ## SQL
+///
+/// This source executes a SQL query, which can have a binding for a datetime in
+/// the past to limit results by; returning any rows indicates an alert trigger.
+///
+/// ```yaml
+/// sql: |
+/// SELECT 1 + 1
+/// ```
+///
+/// ### Query binding parameters
+///
+/// The SQL query will be passed exactly the number of parameters it expects.
+/// The parameters are always provided in this order:
+///
+/// - `$1`: the datetime of the start of the interval (timestamp with time zone)
+/// - `$2`: the interval duration (interval)
+///
+/// ## Shell
+///
+/// This source executes a shell script. Returning a non-zero exit code
+/// indicates an alert trigger. The stdout of the script will be the `output`
+/// template variable.
+///
+/// ```yaml
+/// shell: bash
+/// run: |
+/// echo foo
+/// exit 1
+/// ```
+///
+/// # Send targets
+///
+/// You can send triggered alerts to one or more different targets. Current send
+/// targets are: `email`, `slack`, `zendesk`. Note that you can have multiple
+/// targets of the same type.
+///
+/// ## Email
+///
+/// ```yaml
+/// send:
+/// - target: email
+/// addresses:
+/// - staff@job.com
+/// - support@job.com
+/// ```
+///
+/// ## Slack
+///
+/// ```yaml
+/// send:
+/// - target: slack
+/// webhook: https://hooks.slack.com/services/...
+/// template: |
+/// _Alert!_ There are {{ rows | length }} rows with errors.
+/// ```
+///
+/// You can customise the payload sent to Slack by specifying fields:
+///
+/// ```yaml
+/// send:
+/// - target: slack
+/// webhook: https://hooks.slack.com/services/...
+/// # ...
+/// fields:
+/// - name: alertname
+/// field: filename # this will be replaced with the filename of the alert
+/// - name: deployment
+/// value: production # this will be the exact value 'production'
+/// ```
+///
+/// The default set of fields is:
+///
+/// ```yaml
+/// - name: hostname
+/// field: hostname
+/// - name: filename
+/// field: filename
+/// - name: subject
+/// field: subject
+/// - name: message
+/// field: body
+/// ```
+///
+/// Overriding the `fields` will replace the default set entirely (so you may
+/// want to include all the ones you're not changing).
+///
+/// ## Zendesk (authenticated)
+///
+/// ```yaml
+/// send:
+/// - target: zendesk
+/// endpoint: https://example.zendesk.com/api/v2/requests
+/// credentials:
+/// email: foo@example.com
+/// password: pass
+/// ticket_form_id: 500
+/// custom_fields:
+/// - id: 100
+/// value: tamanu_
+/// - id: 200
+/// value: Test
+/// ```
+///
+/// ## Zendesk (anonymous)
+///
+/// ```yaml
+/// send:
+/// - target: zendesk
+/// endpoint: https://example.zendesk.com/api/v2/requests
+/// requester: Name of requester
+/// ticket_form_id: 500
+/// custom_fields:
+/// - id: 100
+/// value: tamanu_
+/// - id: 200
+/// value: Test
+/// ```
+///
+/// ## External targets
+///
+/// It can be tedious to specify and update the same addresses in many different
+/// alert files, especially for more complex send targets. You can create a
+/// `_targets.yml` file in any of the `--dir`s (if there are multiple such files
+/// they will be merged).
+///
+/// ```yaml
+/// targets:
+/// - id: email-staff
+/// target: email
+/// addresses:
+/// - staff@job.com
+/// - id: zendesk-normal
+/// target: zendesk
+/// endpoint: https://...
+/// credentials:
+/// email: the@bear.com
+/// password: ichooseyou
+/// ```
+///
+/// The `subject` and `template` fields are omitted in the `_targets.yml`.
+///
+/// Then in the alerts file, specify `external` targets, with the relevant `id`s
+/// and the `subject` and `template`:
+///
+/// ```yaml
+/// send:
+/// - target: external
+/// id: email-staff
+/// subject: [Alert] Something is wrong
+/// template: |
+/// Whoops
+/// ```
+///
+/// If you specify multiple external targets with the same `id`, the alert will be
+/// multiplexed (i.e. sent to all targets with that `id`). This can be useful for
+/// sending alerts to both email and slack, or for debugging by temporarily sending
+/// alerts to an additional target.
+///
+/// ---
+/// As this documentation is a bit hard to read in the terminal, you may want to
+/// consult the online version:
+///
+#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu alerts`"))]
+#[derive(Debug, Clone, Parser)]
+#[clap(verbatim_doc_comment)]
+pub struct AlertsArgs {
+	/// Folder containing alert definitions.
+	///
+	/// This folder will be read recursively for files with the `.yaml` or `.yml` extension.
+	///
+	/// Files that don't match the expected format will be skipped, as will files with
+	/// `enabled: false` at the top level. Syntax errors will be reported for YAML files.
+	///
+	/// It's entirely valid to provide a folder that only contains a `_targets.yml` file.
+	///
+	/// Can be provided multiple times. Defaults to (depending on platform): `C:\Tamanu\alerts`,
+	/// `C:\Tamanu\{current-version}\alerts`, `/opt/tamanu-toolbox/alerts`, `/etc/tamanu/alerts`,
+	/// `/alerts`, and `./alerts`.
+	#[cfg_attr(docsrs, doc("\n\n**Flag**: `--dir PATH` (repeatable)"))]
+	#[arg(long)]
+	pub dir: Vec<PathBuf>,
+
+	/// How far back to look for alerts.
+	///
+	/// This is a duration string, e.g. `1d` for one day, `1h` for one hour, etc. It should match
+	/// the task scheduling / cron interval for this command.
+	#[cfg_attr(docsrs, doc("\n\n**Flag**: `--interval DURATION`"))]
+	#[arg(long, default_value = "15m")]
+	pub interval: humantime::Duration,
+
+	/// Timeout for each alert.
+	///
+	/// If an alert takes longer than this to query the database or run the shell script, it will be
+	/// skipped. Defaults to 30 seconds.
+	///
+	/// This is a duration string, e.g. `1d` for one day, `1h` for one hour, etc.
+	#[cfg_attr(docsrs, doc("\n\n**Flag**: `--timeout DURATION`"))]
+	#[arg(long, default_value = "30s")]
+	pub timeout: humantime::Duration,
+
+	/// Don't actually send alerts, just print them to stdout.
+	#[cfg_attr(docsrs, doc("\n\n**Flag**: `--dry-run`"))]
+	#[arg(long)]
+	pub dry_run: bool,
+}
+
+pub struct InternalContext {
+ pub pg_client: tokio_postgres::Client,
+ pub http_client: reqwest::Client,
+}
+
+/// Candidate alert directories that exist on this machine.
+///
+/// Checks the conventional locations (Windows install roots, toolbox, /etc,
+/// filesystem root, and the current working directory) and keeps only those
+/// that are actually present.
+async fn default_dirs(root: &Path) -> Vec<PathBuf> {
+	let mut dirs = vec![
+		PathBuf::from(r"C:\Tamanu\alerts"),
+		root.join("alerts"),
+		PathBuf::from("/opt/tamanu-toolbox/alerts"),
+		PathBuf::from("/etc/tamanu/alerts"),
+		PathBuf::from("/alerts"),
+	];
+	if let Ok(cwd) = current_dir() {
+		dirs.push(cwd.join("alerts"));
+	}
+
+	join_all(dirs.into_iter().map(|dir| async {
+		if dir.exists() {
+			Some(dir)
+		} else {
+			None
+		}
+	}))
+	.await
+	.into_iter()
+	.flatten()
+	.collect()
+}
+
+/// Entry point for `bestool tamanu alerts`: discover, parse, and execute alert definitions.
+pub async fn run(ctx: Context<TamanuArgs, AlertsArgs>) -> Result<()> {
+	let (_, root) = find_tamanu(&ctx.args_top)?;
+	let config = load_config(&root, None)?;
+	debug!(?config, "parsed Tamanu config");
+
+	let dirs = if ctx.args_sub.dir.is_empty() {
+		default_dirs(&root).await
+	} else {
+		ctx.args_sub.dir
+	};
+	debug!(?dirs, "searching for alerts");
+
+	let mut alerts = Vec::<AlertDefinition>::new();
+	let mut external_targets = HashMap::new();
+	for dir in dirs {
+		// Each directory may carry a `_targets.yml`; all of them are merged,
+		// and targets sharing an id are multiplexed.
+		let external_targets_path = dir.join("_targets.yml");
+		if let Some(AlertTargets { targets }) = std::fs::read_to_string(&external_targets_path)
+			.ok()
+			.and_then(|content| {
+				debug!(path=?external_targets_path, "parsing external targets");
+				serde_yml::from_str::<AlertTargets>(&content)
+					.map_err(
+						|err| warn!(path=?external_targets_path, "_targets.yml has errors! {err}"),
+					)
+					.ok()
+			}) {
+			for target in targets {
+				external_targets
+					.entry(target.id().into())
+					.or_insert_with(Vec::new)
+					.push(target);
+			}
+		}
+
+		alerts.extend(
+			WalkDir::new(dir)
+				.into_iter()
+				.filter_map(|e| e.ok())
+				.filter(|e| e.file_type().is_file())
+				.map(|entry| {
+					let file = entry.path();
+
+					// Only YAML files, and `_targets.yml` is not an alert.
+					if !file
+						.extension()
+						.map_or(false, |e| e == "yaml" || e == "yml")
+					{
+						return Ok(None);
+					}
+
+					if file.file_stem().map_or(false, |n| n == "_targets") {
+						return Ok(None);
+					}
+
+					debug!(?file, "parsing YAML file");
+					let content = std::fs::read_to_string(file)
+						.into_diagnostic()
+						.wrap_err(format!("{file:?}"))?;
+					let mut alert: AlertDefinition = serde_yml::from_str(&content)
+						.into_diagnostic()
+						.wrap_err(format!("{file:?}"))?;
+
+					alert.file = file.to_path_buf();
+					alert.interval = ctx.args_sub.interval.into();
+					debug!(?alert, "parsed alert file");
+					Ok(if alert.enabled { Some(alert) } else { None })
+				})
+				// Report per-file errors but keep processing the rest.
+				.filter_map(|def: Result<Option<AlertDefinition>>| match def {
+					Err(err) => {
+						error!("{err:?}");
+						None
+					}
+					Ok(def) => def,
+				}),
+		);
+	}
+
+	if alerts.is_empty() {
+		info!("no alerts found, doing nothing");
+		return Ok(());
+	}
+
+	if !external_targets.is_empty() {
+		debug!(count=%external_targets.len(), "found some external targets");
+	}
+
+	for alert in &mut alerts {
+		*alert = std::mem::take(alert).normalise(&external_targets);
+	}
+	debug!(count=%alerts.len(), "found some alerts");
+
+	let mut pg_config = tokio_postgres::Config::default();
+	pg_config.application_name(&format!(
+		"{}/{} (tamanu alerts)",
+		env!("CARGO_PKG_NAME"),
+		env!("CARGO_PKG_VERSION")
+	));
+	if let Some(host) = &config.db.host {
+		pg_config.host(host);
+	} else {
+		pg_config.host("localhost");
+	}
+	pg_config.user(&config.db.username);
+	pg_config.password(&config.db.password);
+	pg_config.dbname(&config.db.name);
+	info!(config=?pg_config, "connecting to Tamanu database");
+	let (client, connection) = pg_config
+		.connect(tokio_postgres::NoTls)
+		.await
+		.into_diagnostic()?;
+	// The connection future drives the socket; it must be polled on its own task.
+	tokio::spawn(async move {
+		if let Err(e) = connection.await {
+			eprintln!("connection error: {}", e);
+		}
+	});
+
+	let mailgun = Arc::new(config.mailgun);
+	let internal_ctx = Arc::new(InternalContext {
+		pg_client: client,
+		http_client: reqwest::Client::new(),
+	});
+
+	// Run every alert concurrently, each under its own timeout.
+	let mut set = JoinSet::new();
+	for alert in alerts {
+		let internal_ctx = internal_ctx.clone();
+		let dry_run = ctx.args_sub.dry_run;
+		let mailgun = mailgun.clone();
+		let timeout_d: Duration = ctx.args_sub.timeout.into();
+		let name = alert.file.clone();
+		set.spawn(
+			timeout(timeout_d, async move {
+				let error = format!("while executing alert: {}", alert.file.display());
+				if let Err(err) = alert
+					.execute(internal_ctx, mailgun, dry_run)
+					.await
+					.wrap_err(error)
+				{
+					eprintln!("{err:?}");
+				}
+			})
+			.or_else(move |elapsed| async move {
+				error!(alert=?name, "timeout: {elapsed:?}");
+				Ok::<_, Infallible>(())
+			}),
+		);
+	}
+
+	while let Some(res) = set.join_next().await {
+		if let Err(err) = res {
+			error!("task: {err:?}");
+		}
+	}
+
+	Ok(())
+}
diff --git a/crates/bestool/src/actions/tamanu/alerts/definition.rs b/crates/bestool/src/actions/tamanu/alerts/definition.rs
new file mode 100644
index 0000000..5799cee
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/definition.rs
@@ -0,0 +1,330 @@
+use std::{
+ collections::HashMap, io::Write, ops::ControlFlow, path::PathBuf, process::Stdio, sync::Arc,
+ time::Duration,
+};
+
+use chrono::{DateTime, Utc};
+use mailgun_rs::{EmailAddress, Mailgun, Message};
+use miette::{miette, Context as _, IntoDiagnostic, Result};
+use serde_json::json;
+use tera::Context as TeraCtx;
+use tokio::io::AsyncReadExt as _;
+use tokio_postgres::types::ToSql;
+use tracing::{debug, error, info, instrument, warn};
+
+use crate::{actions::tamanu::config, postgres_to_value::rows_to_value_map};
+
+use super::{
+ pg_interval::Interval,
+ targets::{ExternalTarget, SendTarget, TargetEmail},
+ targets::{SlackField, TargetSlack, TargetZendesk, ZendeskCredentials, ZendeskMethod},
+ templates::build_context,
+ templates::{load_templates, render_alert},
+ InternalContext,
+};
+
+fn enabled() -> bool {
+ true
+}
+
+/// A single alert definition, parsed from a YAML file.
+#[derive(serde::Deserialize, Debug, Default)]
+pub struct AlertDefinition {
+	// Path of the file this alert was loaded from; filled in after parsing.
+	#[serde(default, skip)]
+	pub file: PathBuf,
+
+	// Alerts can be disabled with `enabled: false`; they default to enabled.
+	#[serde(default = "enabled")]
+	pub enabled: bool,
+	// Lookback window; comes from the command line, not the YAML.
+	#[serde(skip)]
+	pub interval: Duration,
+	#[serde(default)]
+	pub send: Vec<SendTarget>,
+
+	#[serde(flatten)]
+	pub source: TicketSource,
+
+	// legacy email-only fields; folded into `send` by normalise()
+	#[serde(default)]
+	pub recipients: Vec<String>,
+	pub subject: Option<String>,
+	pub template: Option<String>,
+}
+
+#[derive(serde::Deserialize, Debug, Default)]
+#[serde(untagged, deny_unknown_fields)]
+pub enum TicketSource {
+ Sql {
+ sql: String,
+ },
+ Shell {
+ shell: String,
+ run: String,
+ },
+
+ #[default]
+ None,
+}
+
+impl AlertDefinition {
+	/// Fold legacy fields and external targets into a flat list of send targets.
+	///
+	/// Legacy top-level `recipients`/`subject`/`template` become an email target,
+	/// and every `External` target is expanded via the merged `_targets.yml` map
+	/// (unknown ids are reported and dropped).
+	pub fn normalise(mut self, external_targets: &HashMap<String, Vec<ExternalTarget>>) -> Self {
+		if !self.recipients.is_empty() {
+			self.send.push(SendTarget::Email {
+				subject: self.subject,
+				template: self.template.unwrap_or_default(),
+				conn: TargetEmail {
+					addresses: self.recipients,
+				},
+			});
+			self.recipients = vec![];
+			self.subject = None;
+			self.template = None;
+		}
+
+		self.send = self
+			.send
+			.iter()
+			.flat_map(|target| match target {
+				target @ SendTarget::External { id, .. } => target
+					.resolve_external(external_targets)
+					.unwrap_or_else(|| {
+						error!(id, "external target not found");
+						Vec::new()
+					}),
+				other => vec![other.clone()],
+			})
+			.collect();
+
+		self
+	}
+
+	/// Evaluate the alert's source; `Break` means "not triggered, send nothing".
+	///
+	/// On trigger, inserts `rows` (sql source) or `output` (shell source) into
+	/// the template context.
+	#[instrument(skip(self, client, not_before, context))]
+	pub async fn read_sources(
+		&self,
+		client: &tokio_postgres::Client,
+		not_before: DateTime<Utc>,
+		context: &mut TeraCtx,
+	) -> Result<ControlFlow<()>> {
+		match &self.source {
+			TicketSource::None => {
+				debug!(?self.file, "no source, skipping");
+				return Ok(ControlFlow::Break(()));
+			}
+			TicketSource::Sql { sql } => {
+				let statement = client.prepare(sql).await.into_diagnostic()?;
+
+				// Pass only as many parameters as the query actually uses ($1, $2).
+				let interval = Interval(self.interval);
+				let all_params: Vec<&(dyn ToSql + Sync)> = vec![&not_before, &interval];
+
+				let rows = client
+					.query(&statement, &all_params[..statement.params().len()])
+					.await
+					.into_diagnostic()
+					.wrap_err("querying database")?;
+
+				if rows.is_empty() {
+					debug!(?self.file, "no rows returned, skipping");
+					return Ok(ControlFlow::Break(()));
+				}
+				info!(?self.file, rows=%rows.len(), "alert triggered");
+
+				let context_rows = rows_to_value_map(&rows);
+
+				context.insert("rows", &context_rows);
+			}
+			TicketSource::Shell { shell, run } => {
+				// Write the script to a temp file and run it under the given shell;
+				// a non-zero exit status is the trigger condition.
+				let mut script = tempfile::Builder::new().tempfile().into_diagnostic()?;
+				write!(script.as_file_mut(), "{run}").into_diagnostic()?;
+
+				let mut shell = tokio::process::Command::new(shell)
+					.arg(script.path())
+					.stdin(Stdio::null())
+					.stdout(Stdio::piped())
+					.spawn()
+					.into_diagnostic()?;
+
+				let mut output = Vec::new();
+				let mut stdout = shell
+					.stdout
+					.take()
+					.ok_or_else(|| miette!("getting the child stdout handle"))?;
+				let output_future =
+					futures::future::try_join(shell.wait(), stdout.read_to_end(&mut output));
+
+				let Ok(res) = tokio::time::timeout(self.interval, output_future).await else {
+					warn!(?self.file, "the script timed out, skipping");
+					shell.kill().await.into_diagnostic()?;
+					return Ok(ControlFlow::Break(()));
+				};
+
+				let (status, output_size) = res.into_diagnostic().wrap_err("running the shell")?;
+
+				if status.success() {
+					debug!(?self.file, "the script succeeded, skipping");
+					return Ok(ControlFlow::Break(()));
+				}
+				info!(?self.file, ?status, ?output_size, "alert triggered");
+
+				context.insert("output", &String::from_utf8_lossy(&output));
+			}
+		}
+		Ok(ControlFlow::Continue(()))
+	}
+
+	/// Run this alert end-to-end: evaluate the source, then render and deliver
+	/// to each send target (or print, when `dry_run`).
+	pub async fn execute(
+		self,
+		ctx: Arc<InternalContext>,
+		// NOTE(review): assumed to be the mailgun section of the Tamanu config
+		// (sender/api_key/domain are read below) — confirm the exact type name
+		mailgun: Arc<config::TamanuMailgun>,
+		dry_run: bool,
+	) -> Result<()> {
+		info!(?self.file, "executing alert");
+
+		let now = crate::now_time(&chrono::Utc);
+		let not_before = now - self.interval;
+		info!(?now, ?not_before, interval=?self.interval, "date range for alert");
+
+		let mut tera_ctx = build_context(&self, now);
+		if self
+			.read_sources(&ctx.pg_client, not_before, &mut tera_ctx)
+			.await?
+			.is_break()
+		{
+			return Ok(());
+		}
+
+		for target in &self.send {
+			let tera = load_templates(target)?;
+			let (subject, body, requester) = render_alert(&tera, &mut tera_ctx)?;
+
+			match target {
+				SendTarget::Email {
+					conn: TargetEmail { addresses },
+					..
+				} => {
+					if dry_run {
+						println!("-------------------------------");
+						println!("Alert: {}", self.file.display());
+						println!("Recipients: {}", addresses.join(", "));
+						println!("Subject: {subject}");
+						println!("Body: {body}");
+						continue;
+					}
+
+					// log the actual addresses: `self.recipients` is emptied by normalise()
+					debug!(?addresses, "sending email");
+					let sender = EmailAddress::address(&mailgun.sender);
+					let mailgun = Mailgun {
+						api_key: mailgun.api_key.clone(),
+						domain: mailgun.domain.clone(),
+					};
+					let message = Message {
+						to: addresses
+							.iter()
+							.map(|email| EmailAddress::address(email))
+							.collect(),
+						subject,
+						html: body,
+						..Default::default()
+					};
+					mailgun
+						.async_send(mailgun_rs::MailgunRegion::US, &sender, message)
+						.await
+						.into_diagnostic()
+						.wrap_err("sending email")?;
+				}
+
+				SendTarget::Slack {
+					conn: TargetSlack { webhook, fields },
+					..
+				} => {
+					if dry_run {
+						println!("-------------------------------");
+						println!("Alert: {}", self.file.display());
+						println!("Recipients: slack");
+						println!("Subject: {subject}");
+						println!("Body: {body}");
+						continue;
+					}
+
+					// Each field is either a fixed value or a rendered/looked-up
+					// template field; unresolvable fields become empty strings.
+					let payload: HashMap<&String, String> = fields
+						.iter()
+						.map(|field| match field {
+							SlackField::Fixed { name, value } => (name, value.clone()),
+							SlackField::Field { name, field } => (
+								name,
+								tera.render(field.as_str(), &tera_ctx)
+									.ok()
+									.or_else(|| {
+										tera_ctx.get(field.as_str()).map(|v| match v.as_str() {
+											Some(t) => t.to_owned(),
+											None => v.to_string(),
+										})
+									})
+									.unwrap_or_default(),
+							),
+						})
+						.collect();
+
+					debug!(?webhook, ?payload, "posting to slack webhook");
+					ctx.http_client
+						.post(webhook.clone())
+						.json(&payload)
+						.send()
+						.await
+						.into_diagnostic()
+						.wrap_err("posting to slack webhook")?;
+				}
+
+				SendTarget::Zendesk {
+					conn:
+						TargetZendesk {
+							endpoint,
+							method,
+							ticket_form_id,
+							custom_fields,
+						},
+					..
+				} => {
+					if dry_run {
+						println!("-------------------------------");
+						println!("Alert: {}", self.file.display());
+						println!("Endpoint: {}", endpoint);
+						println!("Subject: {subject}");
+						println!("Body: {body}");
+						continue;
+					}
+
+					let req = json!({
+						"request": {
+							"subject": subject,
+							"ticket_form_id": ticket_form_id,
+							"custom_fields": custom_fields,
+							"comment": { "html_body": body },
+							"requester": requester.map(|r| json!({ "name": r }))
+						}
+					});
+
+					let mut req_builder = ctx.http_client.post(endpoint.clone()).json(&req);
+
+					// Zendesk API tokens authenticate as "email/token".
+					if let ZendeskMethod::Authorized {
+						credentials: ZendeskCredentials { email, password },
+					} = method
+					{
+						req_builder = req_builder
+							.basic_auth(std::format_args!("{email}/token"), Some(password));
+					}
+
+					req_builder
+						.send()
+						.await
+						.into_diagnostic()
+						.wrap_err("creating Zendesk ticket")?;
+					debug!("Zendesk ticket sent");
+				}
+
+				SendTarget::External { .. } => {
+					unreachable!("external targets should be resolved before here");
+				}
+			}
+		}
+
+		Ok(())
+	}
+}
diff --git a/crates/bestool/src/actions/tamanu/alerts/pg_interval.rs b/crates/bestool/src/actions/tamanu/alerts/pg_interval.rs
new file mode 100644
index 0000000..b54c8cf
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/pg_interval.rs
@@ -0,0 +1,23 @@
+use std::{error::Error, time::Duration};
+
+use bytes::{BufMut, BytesMut};
+use miette::Result;
+use tokio_postgres::types::{IsNull, ToSql, Type};
+
+/// Wrapper encoding a `std::time::Duration` as a Postgres `interval` parameter.
+#[derive(Debug)]
+pub struct Interval(pub Duration);
+
+impl ToSql for Interval {
+	fn to_sql(&self, _: &Type, out: &mut BytesMut) -> Result<IsNull, Box<dyn Error + Sync + Send>> {
+		// Postgres interval binary format: microseconds (i64), days (i32), months (i32).
+		out.put_i64(self.0.as_micros().try_into().unwrap_or_default());
+		out.put_i32(0);
+		out.put_i32(0);
+		Ok(IsNull::No)
+	}
+
+	fn accepts(ty: &Type) -> bool {
+		matches!(*ty, Type::INTERVAL)
+	}
+
+	tokio_postgres::types::to_sql_checked!();
+}
diff --git a/crates/bestool/src/actions/tamanu/alerts/targets.rs b/crates/bestool/src/actions/tamanu/alerts/targets.rs
new file mode 100644
index 0000000..f9f2267
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/targets.rs
@@ -0,0 +1,185 @@
+use std::collections::HashMap;
+
+use reqwest::Url;
+
+use super::templates::TemplateField;
+
+/// Where to send a triggered alert, as written in an alert definition file.
+#[derive(serde::Deserialize, Debug, Clone)]
+#[serde(rename_all = "snake_case", tag = "target")]
+pub enum SendTarget {
+	Email {
+		subject: Option<String>,
+		template: String,
+		#[serde(flatten)]
+		conn: TargetEmail,
+	},
+	Zendesk {
+		subject: Option<String>,
+		template: String,
+		#[serde(flatten)]
+		conn: TargetZendesk,
+	},
+	Slack {
+		subject: Option<String>,
+		template: String,
+		#[serde(flatten)]
+		conn: TargetSlack,
+	},
+	// A reference into `_targets.yml`, resolved by AlertDefinition::normalise.
+	External {
+		subject: Option<String>,
+		template: String,
+		id: String,
+	},
+}
+
+impl SendTarget {
+	/// Expand an `External` target into the concrete targets registered under its id,
+	/// carrying this target's subject/template onto each.
+	///
+	/// Returns `None` for non-external targets or when the id is unknown.
+	pub fn resolve_external(
+		&self,
+		external_targets: &HashMap<String, Vec<ExternalTarget>>,
+	) -> Option<Vec<SendTarget>> {
+		match self {
+			Self::External {
+				id,
+				subject,
+				template,
+			} => external_targets.get(id).map(|exts| {
+				exts.iter()
+					.map(|ext| match ext {
+						ExternalTarget::Email { conn, .. } => SendTarget::Email {
+							subject: subject.clone(),
+							template: template.clone(),
+							conn: conn.clone(),
+						},
+						ExternalTarget::Zendesk { conn, .. } => SendTarget::Zendesk {
+							subject: subject.clone(),
+							template: template.clone(),
+							conn: conn.clone(),
+						},
+						ExternalTarget::Slack { conn, .. } => SendTarget::Slack {
+							subject: subject.clone(),
+							template: template.clone(),
+							conn: conn.clone(),
+						},
+					})
+					.collect()
+			}),
+			_ => None,
+		}
+	}
+}
+
+/// Contents of a `_targets.yml` file.
+#[derive(serde::Deserialize, Debug)]
+pub struct AlertTargets {
+	pub targets: Vec<ExternalTarget>,
+}
+
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(rename_all = "snake_case", tag = "target")]
+pub enum ExternalTarget {
+ Email {
+ id: String,
+ #[serde(flatten)]
+ conn: TargetEmail,
+ },
+ Zendesk {
+ id: String,
+ #[serde(flatten)]
+ conn: TargetZendesk,
+ },
+ Slack {
+ id: String,
+ #[serde(flatten)]
+ conn: TargetSlack,
+ },
+}
+
+impl ExternalTarget {
+ pub fn id(&self) -> &str {
+ match self {
+ Self::Email { id, .. } => id,
+ Self::Zendesk { id, .. } => id,
+ Self::Slack { id, .. } => id,
+ }
+ }
+}
+
+/// Email connection details.
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(rename_all = "snake_case")]
+pub struct TargetEmail {
+	pub addresses: Vec<String>,
+}
+
+/// Zendesk connection details for creating requests (tickets).
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(rename_all = "snake_case")]
+pub struct TargetZendesk {
+	pub endpoint: Url,
+
+	#[serde(flatten)]
+	pub method: ZendeskMethod,
+
+	pub ticket_form_id: Option<u64>,
+
+	#[serde(default)]
+	pub custom_fields: Vec<ZendeskCustomField>,
+}
+
+/// Slack connection details (incoming webhook).
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(rename_all = "snake_case")]
+pub struct TargetSlack {
+	pub webhook: Url,
+
+	#[serde(default = "SlackField::default_set")]
+	pub fields: Vec<SlackField>,
+}
+
+/// One entry of the Slack payload: either a fixed value or a template field.
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(untagged, rename_all = "snake_case")]
+pub enum SlackField {
+	Fixed { name: String, value: String },
+	Field { name: String, field: TemplateField },
+}
+
+impl SlackField {
+	/// Default payload fields, used when an alert doesn't override `fields`.
+	pub fn default_set() -> Vec<Self> {
+		vec![
+			Self::Field {
+				name: "hostname".into(),
+				field: TemplateField::Hostname,
+			},
+			Self::Field {
+				name: "filename".into(),
+				field: TemplateField::Filename,
+			},
+			Self::Field {
+				name: "subject".into(),
+				field: TemplateField::Subject,
+			},
+			Self::Field {
+				name: "message".into(),
+				field: TemplateField::Body,
+			},
+		]
+	}
+}
+
+#[derive(serde::Deserialize, Clone, Debug)]
+#[serde(untagged, deny_unknown_fields)]
+pub enum ZendeskMethod {
+ // Make credentials and requester fields exclusive as specifying the requester object in authorized
+ // request is invalid. We may be able to specify some account as the requester, but it's not
+ // necessary. That's because the requester defaults to the authenticated account.
+ Authorized { credentials: ZendeskCredentials },
+ Anonymous { requester: String },
+}
+
+#[derive(serde::Deserialize, Clone, Debug)]
+pub struct ZendeskCredentials {
+ pub email: String,
+ pub password: String,
+}
+
+#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
+pub struct ZendeskCustomField {
+ pub id: u64,
+ pub value: String,
+}
diff --git a/crates/bestool/src/actions/tamanu/alerts/templates.rs b/crates/bestool/src/actions/tamanu/alerts/templates.rs
new file mode 100644
index 0000000..91e0df0
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/templates.rs
@@ -0,0 +1,143 @@
+use std::fmt::Display;
+
+use folktime::duration::{Duration as Folktime, Style as FolkStyle};
+
+use miette::{Context as _, IntoDiagnostic, Result};
+
+use sysinfo::System;
+use tera::{Context as TeraCtx, Tera};
+use tracing::{instrument, warn};
+
+use super::{
+ definition::AlertDefinition,
+ targets::{SendTarget, TargetZendesk, ZendeskMethod},
+};
+
+const DEFAULT_SUBJECT_TEMPLATE: &str = "[Tamanu Alert] {{ filename }} ({{ hostname }})";
+
+#[derive(serde::Deserialize, Clone, Copy, Debug)]
+#[serde(rename_all = "snake_case")]
+pub enum TemplateField {
+ Filename,
+ Subject,
+ Body,
+ Hostname,
+ Requester,
+ Interval,
+}
+
+impl TemplateField {
+ pub fn as_str(self) -> &'static str {
+ match self {
+ Self::Filename => "filename",
+ Self::Subject => "subject",
+ Self::Body => "body",
+ Self::Hostname => "hostname",
+ Self::Requester => "requester",
+ Self::Interval => "interval",
+ }
+ }
+}
+
+impl Display for TemplateField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.as_str())
+ }
+}
+
+/// Compile the subject/body (and, for anonymous Zendesk, requester) templates
+/// for one send target.
+#[instrument]
+pub fn load_templates(target: &SendTarget) -> Result<Tera> {
+	let mut tera = tera::Tera::default();
+
+	match target {
+		SendTarget::Email {
+			subject, template, ..
+		}
+		| SendTarget::Zendesk {
+			subject, template, ..
+		}
+		| SendTarget::Slack {
+			subject, template, ..
+		}
+		| SendTarget::External {
+			subject, template, ..
+		} => {
+			tera.add_raw_template(
+				TemplateField::Subject.as_str(),
+				subject.as_deref().unwrap_or(DEFAULT_SUBJECT_TEMPLATE),
+			)
+			.into_diagnostic()
+			.wrap_err("compiling subject template")?;
+			tera.add_raw_template(TemplateField::Body.as_str(), template)
+				.into_diagnostic()
+				.wrap_err("compiling body template")?;
+		}
+	}
+
+	if let SendTarget::Zendesk {
+		conn: TargetZendesk {
+			method: ZendeskMethod::Anonymous { requester },
+			..
+		},
+		..
+	} = target
+	{
+		tera.add_raw_template(TemplateField::Requester.as_str(), requester)
+			.into_diagnostic()
+			.wrap_err("compiling requester template")?;
+	}
+	Ok(tera)
+}
+
+/// Build the base Tera context shared by all of an alert's targets:
+/// `interval`, `hostname`, `filename`, and `now`.
+#[instrument(skip(alert, now))]
+pub fn build_context(alert: &AlertDefinition, now: chrono::DateTime<chrono::Utc>) -> TeraCtx {
+	let mut context = TeraCtx::new();
+	context.insert(
+		TemplateField::Interval.as_str(),
+		&format!(
+			"{}",
+			Folktime::new(alert.interval).with_style(FolkStyle::OneUnitWhole)
+		),
+	);
+	context.insert(
+		TemplateField::Hostname.as_str(),
+		System::host_name().as_deref().unwrap_or("unknown"),
+	);
+	context.insert(
+		TemplateField::Filename.as_str(),
+		&alert.file.file_name().unwrap().to_string_lossy(),
+	);
+	context.insert("now", &now.to_string());
+
+	context
+}
+
+/// Render subject, body, and (when a requester template exists) requester.
+///
+/// The rendered subject is inserted back into the context so body templates
+/// can `{% include "subject" %}`.
+#[instrument(skip(tera, context))]
+pub fn render_alert(
+	tera: &Tera,
+	context: &mut TeraCtx,
+) -> Result<(String, String, Option<String>)> {
+	let subject = tera
+		.render(TemplateField::Subject.as_str(), context)
+		.into_diagnostic()
+		.wrap_err("rendering subject template")?;
+
+	context.insert(TemplateField::Subject.as_str(), &subject);
+
+	let body = tera
+		.render(TemplateField::Body.as_str(), context)
+		.into_diagnostic()
+		.wrap_err("rendering email template")?;
+
+	// A missing requester template just means this target doesn't use one.
+	let requester = tera
+		.render(TemplateField::Requester.as_str(), context)
+		.map(Some)
+		.or_else(|err| match err.kind {
+			tera::ErrorKind::TemplateNotFound(_) => Ok(None),
+			_ => Err(err),
+		})
+		.into_diagnostic()
+		.wrap_err("rendering requester template")?;
+
+	Ok((subject, body, requester))
+}
diff --git a/crates/bestool/src/actions/tamanu/alerts/tests.rs b/crates/bestool/src/actions/tamanu/alerts/tests.rs
new file mode 100644
index 0000000..6e107bb
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/alerts/tests.rs
@@ -0,0 +1,189 @@
+use std::path::PathBuf;
+
+use chrono::{Duration, Utc};
+
+use super::{
+ definition::{AlertDefinition, TicketSource},
+ targets::SendTarget,
+ templates::build_context,
+};
+
+/// Build a minimal alert with the given interval and return the rendered
+/// `interval` context value.
+fn interval_context(dur: Duration) -> Option<String> {
+	let alert = AlertDefinition {
+		file: PathBuf::from("test.yaml"),
+		enabled: true,
+		interval: dur.to_std().unwrap(),
+		source: TicketSource::Sql { sql: "".into() },
+		send: vec![],
+		recipients: vec![],
+		subject: None,
+		template: None,
+	};
+	build_context(&alert, Utc::now())
+		.get("interval")
+		.and_then(|v| v.as_str())
+		.map(|s| s.to_owned())
+}
+
+#[test]
+fn test_interval_format_minutes() {
+ assert_eq!(
+ interval_context(Duration::minutes(15)).as_deref(),
+ Some("15m"),
+ );
+}
+
+#[test]
+fn test_interval_format_hour() {
+ assert_eq!(interval_context(Duration::hours(1)).as_deref(), Some("1h"),);
+}
+
+#[test]
+fn test_interval_format_day() {
+ assert_eq!(interval_context(Duration::days(1)).as_deref(), Some("1d"),);
+}
+
+#[test]
+fn test_alert_parse_email() {
+ let alert = r#"
+sql: SELECT $1::timestamptz;
+send:
+- target: email
+ addresses: [test@example.com]
+ subject: "[Tamanu Alert] Example ({{ hostname }})"
+ template: |
+ Server: {{ hostname }}
+ There are {{ rows | length }} rows.
+"#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ let alert = alert.normalise(&Default::default());
+ assert_eq!(alert.interval, std::time::Duration::default());
+ assert!(matches!(alert.source, TicketSource::Sql { sql } if sql == "SELECT $1::timestamptz;"));
+ assert!(matches!(alert.send[0], SendTarget::Email { .. }));
+}
+
+#[test]
+fn test_alert_parse_shell() {
+ let alert = r#"
+shell: bash
+run: echo foobar
+"#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ let alert = alert.normalise(&Default::default());
+ assert_eq!(alert.interval, std::time::Duration::default());
+ assert!(
+ matches!(alert.source, TicketSource::Shell { shell, run } if shell == "bash" && run == "echo foobar")
+ );
+}
+
+#[test]
+fn test_alert_parse_invalid_source() {
+	// A shell without run (and vice versa), or mixing sql with shell/run,
+	// must fail to deserialise: the untagged source enum rejects all of these.
+	let alert = r#"
+shell: bash
+"#;
+	assert!(matches!(
+		serde_yml::from_str::<AlertDefinition>(&alert),
+		Err(_)
+	));
+	let alert = r#"
+run: echo foo
+"#;
+	assert!(matches!(
+		serde_yml::from_str::<AlertDefinition>(&alert),
+		Err(_)
+	));
+	let alert = r#"
+sql: SELECT $1::timestamptz;
+run: echo foo
+"#;
+	assert!(matches!(
+		serde_yml::from_str::<AlertDefinition>(&alert),
+		Err(_)
+	));
+	let alert = r#"
+sql: SELECT $1::timestamptz;
+shell: bash
+"#;
+	assert!(matches!(
+		serde_yml::from_str::<AlertDefinition>(&alert),
+		Err(_)
+	));
+	let alert = r#"
+sql: SELECT $1::timestamptz;
+shell: bash
+run: echo foo
+"#;
+	assert!(matches!(
+		serde_yml::from_str::<AlertDefinition>(&alert),
+		Err(_)
+	));
+}
+
+#[test]
+fn test_alert_parse_zendesk_authorized() {
+ let alert = r#"
+sql: SELECT $1::timestamptz;
+send:
+- target: zendesk
+ endpoint: https://example.zendesk.com/api/v2/requests
+ credentials:
+ email: foo@example.com
+ password: pass
+ subject: "[Tamanu Alert] Example ({{ hostname }})"
+ template: "Output: {{ output }}""#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
+}
+
+#[test]
+fn test_alert_parse_zendesk_anon() {
+ let alert = r#"
+sql: SELECT $1::timestamptz;
+send:
+- target: zendesk
+ endpoint: https://example.zendesk.com/api/v2/requests
+ requester: "{{ hostname }}"
+ subject: "[Tamanu Alert] Example ({{ hostname }})"
+ template: "Output: {{ output }}""#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
+}
+
+#[test]
+fn test_alert_parse_zendesk_form_fields() {
+ let alert = r#"
+sql: SELECT $1::timestamptz;
+send:
+- target: zendesk
+ endpoint: https://example.zendesk.com/api/v2/requests
+ requester: "{{ hostname }}"
+ subject: "[Tamanu Alert] Example ({{ hostname }})"
+ template: "Output: {{ output }}"
+ ticket_form_id: 500
+ custom_fields:
+ - id: 100
+ value: tamanu_
+ - id: 200
+ value: Test
+"#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ assert!(matches!(alert.send[0], SendTarget::Zendesk { .. }));
+}
+
+#[test]
+fn test_alert_parse_legacy_recipients() {
+ let alert = r#"
+sql: |
+ SELECT $1::timestamptz;
+recipients:
+ - test@example.com
+subject: "[Tamanu Alert] Example ({{ hostname }})"
+template: |
+ Server: {{ hostname }}
+ There are {{ rows | length }} rows.
+"#;
+ let alert: AlertDefinition = serde_yml::from_str(&alert).unwrap();
+ let alert = alert.normalise(&Default::default());
+ assert_eq!(alert.interval, std::time::Duration::default());
+ assert!(matches!(alert.send[0], SendTarget::Email { .. }));
+}
diff --git a/crates/bestool/src/actions/tamanu/backup.rs b/crates/bestool/src/actions/tamanu/backup.rs
index 51ae5a2..9fc685d 100644
--- a/crates/bestool/src/actions/tamanu/backup.rs
+++ b/crates/bestool/src/actions/tamanu/backup.rs
@@ -20,12 +20,14 @@ use tracing::{debug, info, instrument, warn};
use crate::{
actions::{
- tamanu::{config::load_config, find_package, find_postgres_bin, find_tamanu, TamanuArgs},
+ tamanu::{config::load_config, find_postgres_bin, find_tamanu, TamanuArgs},
Context,
},
now_time,
};
+use super::config::TamanuConfig;
+
/// Backup a local Tamanu database to a single file.
///
/// This finds the database from the Tamanu's configuration. The output will be written to a file
@@ -96,20 +98,6 @@ pub struct BackupArgs {
pub key: KeyArgs,
}
-#[derive(serde::Deserialize, Debug)]
-#[serde(rename_all = "camelCase")]
-pub struct TamanuConfig {
- pub canonical_host_name: String,
- pub db: TamanuDb,
-}
-
-#[derive(serde::Deserialize, Debug)]
-pub struct TamanuDb {
- pub name: String,
- pub username: String,
- pub password: String,
-}
-
pub async fn run(ctx: Context) -> Result<()> {
create_dir_all(&ctx.args_sub.write_to)
.await
@@ -117,16 +105,11 @@ pub async fn run(ctx: Context) -> Result<()> {
.wrap_err("creating dest dir")?;
let (_, root) = find_tamanu(&ctx.args_top)?;
- let kind = find_package(&root);
- let config_value = load_config(&root, kind.package_name())?;
+ let config = load_config(&root, None)?;
+ debug!(?config, "parsed Tamanu config");
let pg_dump = find_postgres_bin("pg_dump")?;
- let config: TamanuConfig = serde_json::from_value(config_value)
- .into_diagnostic()
- .wrap_err("parsing of Tamanu config failed")?;
- debug!(?config, "parsed Tamanu config");
-
// check key
ctx.args_sub.key.get_public_key().await?;
diff --git a/crates/bestool/src/actions/tamanu/backup_configs.rs b/crates/bestool/src/actions/tamanu/backup_configs.rs
index 69e31d0..5165133 100644
--- a/crates/bestool/src/actions/tamanu/backup_configs.rs
+++ b/crates/bestool/src/actions/tamanu/backup_configs.rs
@@ -16,16 +16,15 @@ use zip::{write::SimpleFileOptions, CompressionMethod, ZipWriter};
use crate::{
actions::{
tamanu::{
- backup::TamanuConfig,
config::{find_config_dir, load_config},
- find_package, find_tamanu, TamanuArgs,
+ find_tamanu, TamanuArgs,
},
Context,
},
now_time,
};
-use super::backup::process_backup;
+use super::{backup::process_backup, config::TamanuConfig, find_package};
/// Backup local Tamanu-related config files to a zip archive.
///
@@ -175,11 +174,7 @@ pub async fn run(ctx: Context) -> Result<()> {
let (_, root) = find_tamanu(&ctx.args_top)?;
let kind = find_package(&root);
- let config_value = load_config(&root, kind.package_name())?;
-
- let config: TamanuConfig = serde_json::from_value(config_value)
- .into_diagnostic()
- .wrap_err("parsing tamanu config")?;
+ let config = load_config(&root, Some(kind.package_name()))?;
let output = Path::new(&ctx.args_sub.write_to).join(make_backup_filename(&config));
@@ -207,7 +202,7 @@ pub async fn run(ctx: Context) -> Result<()> {
add_file(&mut zip, root.join("pm2.config.cjs"), "pm2.config.cjs");
add_dir(&mut zip, root.join("alerts"), "alerts/version")?;
add_dir(&mut zip, r"C:\Tamanu\alerts", "alerts/global")?;
- if let Some(path) = find_config_dir(&root, kind.package_name(), ".") {
+ if let Some(path) = find_config_dir(&root, Some(kind.package_name()), ".") {
add_dir(&mut zip, path, kind.package_name())?;
}
diff --git a/crates/bestool/src/actions/tamanu/config.rs b/crates/bestool/src/actions/tamanu/config.rs
index 93237e8..a75407d 100644
--- a/crates/bestool/src/actions/tamanu/config.rs
+++ b/crates/bestool/src/actions/tamanu/config.rs
@@ -1,162 +1,7 @@
-use std::{
- fs::File,
- io::Read,
- path::{Path, PathBuf},
-};
+pub use command::*;
+pub use loader::*;
+pub use structure::*;
-use clap::Parser;
-use miette::{bail, IntoDiagnostic, Result, WrapErr};
-use tracing::{debug, instrument};
-
-use crate::actions::Context;
-
-use super::{find_tamanu, TamanuArgs};
-
-/// Find and print the current Tamanu config.
-#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu config`"))]
-#[derive(Debug, Clone, Parser)]
-pub struct ConfigArgs {
- /// Package to look at
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-p, --package central|facility`"))]
- #[arg(short, long)]
- pub package: String,
-
- /// Print compact JSON instead of pretty
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-c, --compact`"))]
- #[arg(short, long)]
- pub compact: bool,
-
- /// Print null if key not found
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-n, --or-null`"))]
- #[arg(short = 'n', long)]
- pub or_null: bool,
-
- /// Path to a subkey
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-k, --key`"))]
- #[arg(short, long)]
- pub key: Option<String>,
-
- /// If the value is a string, print it directly (without quotes)
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-r, --raw`"))]
- #[arg(short, long)]
- pub raw: bool,
-}
-
-#[instrument(level = "debug")]
-pub fn load_config(root: &Path, package: &str) -> Result<serde_json::Value> {
- let mut config = package_config(root, package, "default.json5")
- .transpose()?
- .unwrap_or_else(|| serde_json::Value::Object(Default::default()));
-
- if let Ok(env_name) = std::env::var("NODE_ENV") {
- if let Some(env_config) =
- package_config(root, package, &format!("{env_name}.json5")).transpose()?
- {
- config = merge_json(config, env_config);
- }
- } else {
- if let Some(env_config) = package_config(root, package, "production.json5").transpose()? {
- config = merge_json(config, env_config);
- }
- }
-
- if let Some(local_config) = package_config(root, package, "local.json5").transpose()? {
- config = merge_json(config, local_config);
- }
-
- Ok(config)
-}
-
-pub async fn run(ctx: Context) -> Result<()> {
- let (_, root) = find_tamanu(&ctx.args_top)?;
-
- let config = load_config(&root, &ctx.args_sub.package)?;
-
- let value = if let Some(key) = &ctx.args_sub.key {
- let mut value = &config;
- for part in key.split('.') {
- value = match value.get(part) {
- Some(value) => value,
- None if ctx.args_sub.or_null => &serde_json::Value::Null,
- None => bail!("key not found: {:?}", key),
- };
- }
- value
- } else {
- &config
- };
-
- println!(
- "{}",
- match (ctx.args_sub, value.as_str()) {
- (ConfigArgs { raw: true, .. }, Some(string)) => {
- string.into()
- }
- (ConfigArgs { compact: true, .. }, _) => {
- serde_json::to_string(&value).into_diagnostic()?
- }
- _ => serde_json::to_string_pretty(&value).into_diagnostic()?,
- }
- );
-
- Ok(())
-}
-
-#[instrument(level = "debug")]
-pub fn find_config_dir(root: &Path, package: &str, file: &str) -> Option<PathBuf> {
- // Windows installs
- let path = root
- .join("packages")
- .join(package)
- .join("config")
- .join(file);
- if path.exists() {
- return Some(path);
- }
-
- // Linux installs
- let path = root.join(file);
- if path.exists() {
- return Some(path);
- }
-
- None
-}
-
-#[instrument(level = "debug")]
-pub fn package_config(root: &Path, package: &str, file: &str) -> Option<Result<serde_json::Value>> {
- fn inner(path: &Path) -> Result {
- debug!(?path, "opening config file");
- let mut file = File::open(path).into_diagnostic()?;
-
- let mut contents = String::new();
- let bytes = file.read_to_string(&mut contents).into_diagnostic()?;
- debug!(%bytes, "read config file");
-
- let config: serde_json::Value = json5::from_str(&contents).into_diagnostic()?;
- Ok(config)
- }
-
- find_config_dir(root, package, file)
- .map(|path| inner(&path).wrap_err(path.to_string_lossy().into_owned()))
-}
-
-#[instrument(level = "trace")]
-pub fn merge_json(
- mut base: serde_json::Value,
- mut overlay: serde_json::Value,
-) -> serde_json::Value {
- if let (Some(base), Some(overlay)) = (base.as_object_mut(), overlay.as_object_mut()) {
- for (key, value) in overlay {
- if let Some(base_value) = base.get_mut(key) {
- *base_value = merge_json(base_value.clone(), value.clone());
- } else {
- base.insert(key.clone(), value.clone());
- }
- }
- } else {
- // If either or both of `base` and `overlay` are scalar values, it must be safe to simply overwrite the base.
- base = overlay
- }
- base
-}
+mod command;
+mod loader;
+mod structure;
diff --git a/crates/bestool/src/actions/tamanu/config/command.rs b/crates/bestool/src/actions/tamanu/config/command.rs
new file mode 100644
index 0000000..c1cdcef
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/config/command.rs
@@ -0,0 +1,74 @@
+use clap::Parser;
+use miette::{bail, IntoDiagnostic, Result};
+
+use crate::actions::{
+ tamanu::{find_tamanu, TamanuArgs},
+ Context,
+};
+
+/// Find and print the current Tamanu config.
+#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu config`"))]
+#[derive(Debug, Clone, Parser)]
+pub struct ConfigArgs {
+ /// Package to look at
+ ///
+ /// If not provided, will look first for central then facility package.
+ #[cfg_attr(docsrs, doc("\n\n**Flag**: `-p, --package central|facility`"))]
+ #[arg(short, long)]
+ pub package: Option<String>,
+
+ /// Print compact JSON instead of pretty
+ #[cfg_attr(docsrs, doc("\n\n**Flag**: `-c, --compact`"))]
+ #[arg(short, long)]
+ pub compact: bool,
+
+ /// Print null if key not found
+ #[cfg_attr(docsrs, doc("\n\n**Flag**: `-n, --or-null`"))]
+ #[arg(short = 'n', long)]
+ pub or_null: bool,
+
+ /// Path to a subkey
+ #[cfg_attr(docsrs, doc("\n\n**Flag**: `-k, --key`"))]
+ #[arg(short, long)]
+ pub key: Option<String>,
+
+ /// If the value is a string, print it directly (without quotes)
+ #[cfg_attr(docsrs, doc("\n\n**Flag**: `-r, --raw`"))]
+ #[arg(short, long)]
+ pub raw: bool,
+}
+
+pub async fn run(ctx: Context) -> Result<()> {
+ let (_, root) = find_tamanu(&ctx.args_top)?;
+
+ let config = super::loader::load_config_as_object(&root, ctx.args_sub.package.as_deref())?;
+
+ let value = if let Some(key) = &ctx.args_sub.key {
+ let mut value = &config;
+ for part in key.split('.') {
+ value = match value.get(part) {
+ Some(value) => value,
+ None if ctx.args_sub.or_null => &serde_json::Value::Null,
+ None => bail!("key not found: {:?}", key),
+ };
+ }
+ value
+ } else {
+ &config
+ };
+
+ println!(
+ "{}",
+ match (ctx.args_sub, value.as_str()) {
+ (ConfigArgs { raw: true, .. }, Some(string)) => {
+ string.into()
+ }
+ (ConfigArgs { compact: true, .. }, _) => {
+ serde_json::to_string(&value).into_diagnostic()?
+ }
+ _ => serde_json::to_string_pretty(&value).into_diagnostic()?,
+ }
+ );
+
+ Ok(())
+}
diff --git a/crates/bestool/src/actions/tamanu/config/loader.rs b/crates/bestool/src/actions/tamanu/config/loader.rs
new file mode 100644
index 0000000..1afa930
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/config/loader.rs
@@ -0,0 +1,114 @@
+use std::{
+ fs::File,
+ io::Read,
+ path::{Path, PathBuf},
+};
+
+use miette::{IntoDiagnostic, Result, WrapErr};
+use tracing::{debug, instrument};
+
+#[instrument(level = "debug")]
+pub fn load_config(root: &Path, package: Option<&str>) -> Result<super::TamanuConfig> {
+ serde_json::from_value(load_config_as_object(root, package)?).into_diagnostic()
+}
+
+#[instrument(level = "debug")]
+pub fn load_config_as_object(root: &Path, package: Option<&str>) -> Result<serde_json::Value> {
+ let mut config = package_config(root, package, "default.json5")
+ .transpose()?
+ .unwrap_or_else(|| serde_json::Value::Object(Default::default()));
+
+ if let Ok(env_name) = std::env::var("NODE_ENV") {
+ if let Some(env_config) =
+ package_config(root, package, &format!("{env_name}.json5")).transpose()?
+ {
+ config = merge_json(config, env_config);
+ }
+ } else {
+ if let Some(env_config) = package_config(root, package, "production.json5").transpose()? {
+ config = merge_json(config, env_config);
+ }
+ }
+
+ if let Some(local_config) = package_config(root, package, "local.json5").transpose()? {
+ config = merge_json(config, local_config);
+ }
+
+ Ok(config)
+}
+
+#[instrument(level = "debug")]
+pub fn find_config_dir(root: &Path, package: Option<&str>, file: &str) -> Option<PathBuf> {
+ // Windows installs
+ if let Some(package) = package {
+ let path = root
+ .join("packages")
+ .join(package)
+ .join("config")
+ .join(file);
+ if path.exists() {
+ return Some(path);
+ }
+ } else {
+ for package in ["central-server", "facility-server"] {
+ let path = root
+ .join("packages")
+ .join(package)
+ .join("config")
+ .join(file);
+ if path.exists() {
+ return Some(path);
+ }
+ }
+ }
+
+ // Linux installs
+ let path = root.join(file);
+ if path.exists() {
+ return Some(path);
+ }
+
+ None
+}
+
+#[instrument(level = "debug")]
+pub fn package_config(
+ root: &Path,
+ package: Option<&str>,
+ file: &str,
+) -> Option<Result<serde_json::Value>> {
+ fn inner(path: &Path) -> Result {
+ debug!(?path, "opening config file");
+ let mut file = File::open(path).into_diagnostic()?;
+
+ let mut contents = String::new();
+ let bytes = file.read_to_string(&mut contents).into_diagnostic()?;
+ debug!(%bytes, "read config file");
+
+ let config: serde_json::Value = json5::from_str(&contents).into_diagnostic()?;
+ Ok(config)
+ }
+
+ find_config_dir(root, package, file)
+ .map(|path| inner(&path).wrap_err(path.to_string_lossy().into_owned()))
+}
+
+#[instrument(level = "trace")]
+pub fn merge_json(
+ mut base: serde_json::Value,
+ mut overlay: serde_json::Value,
+) -> serde_json::Value {
+ if let (Some(base), Some(overlay)) = (base.as_object_mut(), overlay.as_object_mut()) {
+ for (key, value) in overlay {
+ if let Some(base_value) = base.get_mut(key) {
+ *base_value = merge_json(base_value.clone(), value.clone());
+ } else {
+ base.insert(key.clone(), value.clone());
+ }
+ }
+ } else {
+ // If either or both of `base` and `overlay` are scalar values, it must be safe to simply overwrite the base.
+ base = overlay
+ }
+ base
+}
diff --git a/crates/bestool/src/actions/tamanu/config/structure.rs b/crates/bestool/src/actions/tamanu/config/structure.rs
new file mode 100644
index 0000000..ec6ad73
--- /dev/null
+++ b/crates/bestool/src/actions/tamanu/config/structure.rs
@@ -0,0 +1,25 @@
+#[derive(Debug, Clone, serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TamanuConfig {
+ pub canonical_host_name: String,
+ pub db: Database,
+ pub mailgun: Mailgun,
+}
+
+#[derive(Debug, Clone, serde::Deserialize)]
+pub struct Database {
+ pub host: Option<String>,
+ pub name: String,
+ pub username: String,
+ pub password: String,
+}
+
+#[derive(Debug, Clone, serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Mailgun {
+ pub domain: String,
+ pub api_key: String,
+
+ #[serde(rename = "from")]
+ pub sender: String,
+}
diff --git a/crates/bestool/src/actions/tamanu/greenmask_config.rs b/crates/bestool/src/actions/tamanu/greenmask_config.rs
index eeefc21..272bd79 100644
--- a/crates/bestool/src/actions/tamanu/greenmask_config.rs
+++ b/crates/bestool/src/actions/tamanu/greenmask_config.rs
@@ -9,21 +9,12 @@ use walkdir::WalkDir;
use crate::actions::{tamanu::find_postgres_bin, Context};
-use super::{config::load_config, find_package, find_tamanu, ApiServerKind, TamanuArgs};
+use super::{config::load_config, find_tamanu, TamanuArgs};
/// Generate a Greenmask config file.
#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu greenmask-config`"))]
#[derive(Debug, Clone, Parser)]
pub struct GreenmaskConfigArgs {
- /// Package to load config from.
- ///
- /// By default, this command looks for the most recent installed version of Tamanu and tries to
- /// look for an appropriate config. If both central and facility servers are present and
- /// configured, it will pick one arbitrarily.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `--kind central|facility`"))]
- #[arg(long)]
- pub kind: Option<ApiServerKind>,
-
/// Folders containing table masking definitions.
///
/// Can be specified multiple times, entries will be merged.
@@ -44,24 +35,6 @@ pub struct GreenmaskConfigArgs {
 pub storage_dir: Option<PathBuf>,
}
-#[derive(serde::Deserialize, Debug)]
-struct TamanuConfig {
- db: Db,
-}
-
-fn default_host() -> String {
- "localhost".into()
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct Db {
- #[serde(default = "default_host")]
- host: String,
- name: String,
- username: String,
- password: String,
-}
-
#[derive(serde::Serialize, Debug)]
struct GreenmaskConfig {
common: GreenmaskCommon,
@@ -137,11 +110,7 @@ pub async fn run(ctx: Context) -> Result<()> {
let (_, tamanu_folder) = find_tamanu(&ctx.args_top)?;
let root = tamanu_folder.parent().unwrap();
- let kind = ctx.args_sub.kind.unwrap_or_else(|| find_package(&tamanu_folder));
- let config_value = load_config(&tamanu_folder, kind.package_name())?;
- let tamanu_config: TamanuConfig = serde_json::from_value(config_value)
- .into_diagnostic()
- .wrap_err("parsing of Tamanu config failed")?;
+ let config = load_config(&tamanu_folder, None)?;
let pg_bin_path = find_postgres_bin("psql").wrap_err("failed to find psql executable")?;
let tmp_dir = temp_dir();
@@ -214,10 +183,10 @@ pub async fn run(ctx: Context) -> Result<()> {
pg_dump_options: GreenmaskDumpOptions {
dbname: format!(
"host='{}' user='{}' password='{}' dbname='{}'",
- tamanu_config.db.host,
- tamanu_config.db.username,
- tamanu_config.db.password,
- tamanu_config.db.name
+ config.db.host.as_deref().unwrap_or("localhost"),
+ config.db.username,
+ config.db.password,
+ config.db.name
),
schema: "public".into(),
},
diff --git a/crates/bestool/src/actions/tamanu/psql.rs b/crates/bestool/src/actions/tamanu/psql.rs
index 2fcb9f8..95e34cf 100644
--- a/crates/bestool/src/actions/tamanu/psql.rs
+++ b/crates/bestool/src/actions/tamanu/psql.rs
@@ -5,28 +5,16 @@ use miette::{Context as _, IntoDiagnostic, Result};
use crate::actions::Context;
-use super::{
- config::load_config, find_package, find_postgres_bin, find_tamanu, ApiServerKind, TamanuArgs,
-};
+use super::{config::load_config, find_postgres_bin, find_tamanu, TamanuArgs};
/// Connect to Tamanu's db via `psql`.
#[cfg_attr(docsrs, doc("\n\n**Command**: `bestool tamanu psql`"))]
#[derive(Debug, Clone, Parser)]
pub struct PsqlArgs {
- /// Package to load config from.
- ///
- /// By default, this command looks for the most recent installed version of Tamanu and tries to
- /// look for an appropriate config. If both central and facility servers are present and
- /// configured, it will pick one arbitrarily.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-k, -kind central|facility`"))]
- #[arg(short, long)]
- pub kind: Option<ApiServerKind>,
-
/// Connect to postgres with a different username.
///
/// This may prompt for a password depending on your local settings and pg_hba config.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-u, -U, --username STRING`"))]
- #[arg(short = 'U', long, alias = "u")]
+ #[arg(short = 'U', long)]
 pub username: Option<String>,
/// Enable write mode for this psql.
@@ -34,32 +22,14 @@ pub struct PsqlArgs {
/// By default we set `TRANSACTION READ ONLY` for the session, which prevents writes. To enable
/// writes, either pass this flag, or call `SET SESSION CHARACTERISTICS AS TRANSACTION READ WRITE;`
/// within the session.
- #[cfg_attr(docsrs, doc("\n\n**Flag**: `-W, --write`"))]
#[arg(short = 'W', long)]
pub write: bool,
}
-/// The Tamanu config only describing the part `psql` needs
-#[derive(serde::Deserialize, Debug)]
-struct Config {
- db: Db,
-}
-
-#[derive(serde::Deserialize, Debug)]
-struct Db {
- name: String,
- username: String,
- password: String,
-}
-
pub async fn run(ctx: Context) -> Result<()> {
let (_, root) = find_tamanu(&ctx.args_top)?;
- let kind = ctx.args_sub.kind.unwrap_or_else(|| find_package(&root));
- let config_value = load_config(&root, kind.package_name())?;
- let config: Config = serde_json::from_value(config_value)
- .into_diagnostic()
- .wrap_err("parsing of Tamanu config failed")?;
+ let config = load_config(&root, None)?;
let name = &config.db.name;
let (username, password) = if let Some(ref username) = ctx.args_sub.username {
// Rely on `psql` password prompt by making the password parameter empty.