diff --git a/Makefile b/Makefile index 618d73f591..c12b448c7e 100644 --- a/Makefile +++ b/Makefile @@ -19,28 +19,39 @@ export $(shell sed 's/=.*//' .env) # 2. Edit the `.env` file as needed to update variables and secrets # 3. Run `make web` -env: | setup ## Copies the default ./env_config/env.docker to ./.env +setup: | setup-env check-env ## Copies the default ./env_config/env.docker to ./.env +env: | setup-env check-env ## Copies the default ./env_config/env.docker to ./.env -postgres: | close build-postgres run-postgres ## Performs all commands necessary to run the postgres project (db) in docker -backend: | close build-backend run-backend ## Performs all commands necessary to run all backend projects (db, api) in docker -web: | close build-web check-env run-web ## Performs all commands necessary to run all backend+web projects (db, api, app) in docker +postgres: | close check-env build-postgres run-postgres ## Performs all commands necessary to run the postgres project (db) in docker +backend: | close check-env build-backend run-backend ## Performs all commands necessary to run all backend projects (db, api) in docker +web: | close check-env build-web run-web ## Performs all commands necessary to run all backend+web projects (db, api, app) in docker -db-setup: | build-db-setup run-db-setup ## Performs all commands necessary to run the database migrations and seeding +db-setup: | check-env build-db-setup run-db-setup ## Performs all commands necessary to run the database migrations and seeding -clamav: | build-clamav run-clamav ## Performs all commands necessary to run clamav +clamav: | check-env build-clamav run-clamav ## Performs all commands necessary to run clamav fix: | lint-fix format-fix ## Performs both lint-fix and format-fix commands ## ------------------------------------------------------------------------------ -## Setup/Cleanup Commands +## Setup Commands ## ------------------------------------------------------------------------------ -setup: ## 
Prepares the environment variables used by all project docker containers +setup-env: ## Prepares the environment variables used by all project docker containers, by copying the sample 'env.docker' to '.env'. Note: Some variables may need to be updated, like secrets @echo "===============================================" - @echo "Make: setup - copying env.docker to .env" + @echo "Make: setup-env - copying env.docker to .env" @echo "===============================================" @cp -i env_config/env.docker .env +check-env: ## Logs any env vars that are missing or have no value, in the '.env' file + @echo "===============================================" + @echo "Make: check-env - checking for missing env vars" + @echo "===============================================" + @awk -F '=' 'NR==FNR && !/^#/ && NF {a[$$1]; next} !/^#/ && NF && !($$1 in a)' .env env_config/env.docker | while read -r line; do echo "Warning: Missing value for $$line in .env"; done + +## ------------------------------------------------------------------------------ +## Cleanup Commands +## ------------------------------------------------------------------------------ + close: ## Closes all project containers @echo "===============================================" @echo "Make: close - closing Docker containers" @@ -61,12 +72,6 @@ prune: ## Deletes ALL docker artifacts (even those not associated to this projec @docker system prune --all --volumes -f @docker volume prune --all -f -check-env: ## Check for missing env vars - @echo "===============================================" - @echo "Make: check-env - checking for missing env vars" - @echo "===============================================" - @awk -F '=' 'NR==FNR && !/^#/ && NF {a[$$1]; next} !/^#/ && NF && !($$1 in a)' .env env_config/env.docker - ## ------------------------------------------------------------------------------ ## Build/Run Postgres DB Commands ## - Builds all of the SIMS postgres db projects (db, db_setup) @@ -142,6 +147,16 @@ 
api-container: ## Executes into the api container. @docker compose exec api bash ## ------------------------------------------------------------------------------ +## Cronjob commands +## - You can include additional CLI arguments by appending the `args` param +## - Ex: `make telemetry-cronjob args="--concurrently 100 --batchSize 1000"` +## ------------------------------------------------------------------------------ +telemetry-cronjob: ## Run the telemetry cronjob + @echo "===============================================" + @echo "Telemetry Cronjob" + @echo "===============================================" + @docker compose exec api npm run telemetry-cronjob -- $(args) +## ------------------------------------------------------------------------------ ## Database migration commands ## ------------------------------------------------------------------------------ @@ -158,7 +173,7 @@ run-db-setup: ## Run the database migrations and seeding @docker compose up db_setup ## ------------------------------------------------------------------------------ -## clamav commands +## Clamav commands ## ------------------------------------------------------------------------------ build-clamav: ## Build the clamav image @@ -324,13 +339,13 @@ log-db-setup: ## Runs `docker logs -f` for the database setup contai ## Runs ts-trace to find typescript compilation issues and hotspots ## Docs: https://github.com/microsoft/typescript-analyze-trace ## ------------------------------------------------------------------------------ -trace-app: +trace-app: ## Runs ts-trace to find typescript compilation issues and hotspots in the app @echo "===============================================" @echo "Typscript trace - searching App hotspots" @echo "===============================================" @cd app && npx tsc -p ./tsconfig.json --generateTrace ts-traces || npx @typescript/analyze-trace --skipMillis 100 --forceMillis 300 --expandTypes ts-traces -trace-api: +trace-api: ## Runs ts-trace to find 
typescript compilation issues and hotspots in the api @echo "===============================================" @echo "Typscript trace - searching for Api hotspots" @echo "===============================================" diff --git a/api/.pipeline/config.js b/api/.pipeline/config.js index ac506c9645..d6b243b1e6 100644 --- a/api/.pipeline/config.js +++ b/api/.pipeline/config.js @@ -71,6 +71,8 @@ const phases = { dbName: `${dbName}`, phase: 'dev', changeId: deployChangeId, + telemetryCronjobSchedule: '0 0 * * *', // Daily at midnight + telemetryCronjobDisabled: !isStaticDeployment, suffix: `-dev-${deployChangeId}`, instance: `${name}-dev-${deployChangeId}`, version: `${deployChangeId}-${changeId}`, @@ -83,7 +85,6 @@ const phases = { backboneArtifactIntakePath: '/api/artifact/intake', biohubTaxonPath: '/api/taxonomy/taxon', biohubTaxonTsnPath: '/api/taxonomy/taxon/tsn', - bctwApiHost: 'https://moe-bctw-api-dev.apps.silver.devops.gov.bc.ca', critterbaseApiHost: 'https://moe-critterbase-api-dev.apps.silver.devops.gov.bc.ca/api', nodeEnv: 'development', s3KeyPrefix: (isStaticDeployment && 'sims') || `local/${deployChangeId}/sims`, @@ -114,6 +115,8 @@ const phases = { dbName: `${dbName}`, phase: 'test', changeId: deployChangeId, + telemetryCronjobSchedule: '0 0 * * *', // Daily at midnight + telemetryCronjobDisabled: !isStaticDeployment, suffix: `-test`, instance: `${name}-test`, version: `${version}`, @@ -126,7 +129,6 @@ const phases = { backboneArtifactIntakePath: '/api/artifact/intake', biohubTaxonPath: '/api/taxonomy/taxon', biohubTaxonTsnPath: '/api/taxonomy/taxon/tsn', - bctwApiHost: 'https://moe-bctw-api-test.apps.silver.devops.gov.bc.ca', critterbaseApiHost: 'https://moe-critterbase-api-test.apps.silver.devops.gov.bc.ca/api', nodeEnv: 'production', s3KeyPrefix: 'sims', @@ -157,6 +159,8 @@ const phases = { dbName: `${dbName}-spi`, phase: 'test-spi', changeId: deployChangeId, + telemetryCronjobSchedule: '0 0 * * *', // Daily at midnight + telemetryCronjobDisabled: 
!isStaticDeployment, suffix: `-test-spi`, instance: `${name}-spi-test-spi`, version: `${version}`, @@ -169,7 +173,6 @@ const phases = { backboneArtifactIntakePath: '/api/artifact/intake', biohubTaxonPath: '/api/taxonomy/taxon', biohubTaxonTsnPath: '/api/taxonomy/taxon/tsn', - bctwApiHost: 'https://moe-bctw-api-test.apps.silver.devops.gov.bc.ca', critterbaseApiHost: 'https://moe-critterbase-api-test.apps.silver.devops.gov.bc.ca/api', nodeEnv: 'production', s3KeyPrefix: 'sims', @@ -200,6 +203,8 @@ const phases = { dbName: `${dbName}`, phase: 'prod', changeId: deployChangeId, + telemetryCronjobSchedule: '0 0 * * *', // Daily at midnight + telemetryCronjobDisabled: !isStaticDeployment, suffix: `-prod`, instance: `${name}-prod`, version: `${version}`, @@ -212,7 +217,6 @@ const phases = { backboneArtifactIntakePath: '/api/artifact/intake', biohubTaxonPath: '/api/taxonomy/taxon', biohubTaxonTsnPath: '/api/taxonomy/taxon/tsn', - bctwApiHost: 'https://moe-bctw-api-prod.apps.silver.devops.gov.bc.ca', critterbaseApiHost: 'https://moe-critterbase-api-prod.apps.silver.devops.gov.bc.ca/api', nodeEnv: 'production', s3KeyPrefix: 'sims', diff --git a/api/.pipeline/lib/api.deploy.js b/api/.pipeline/lib/api.deploy.js index 08a1266580..679f4aad23 100644 --- a/api/.pipeline/lib/api.deploy.js +++ b/api/.pipeline/lib/api.deploy.js @@ -25,12 +25,16 @@ const apiDeploy = async (settings) => { objects.push( ...oc.processDeploymentTemplate(`${templatesLocalBaseUrl}/api.dc.yaml`, { param: { + NAMESPACE: phases[phase].namespace, NAME: phases[phase].name, SUFFIX: phases[phase].suffix, VERSION: phases[phase].tag, HOST: phases[phase].host, APP_HOST: phases[phase].appHost, CHANGE_ID: phases.build.changeId || changeId, + // Cronjobs + TELEMETRY_CRONJOB_SCHEDULE: phases[phase].telemetryCronjobSchedule, + TELEMETRY_CRONJOB_DISABLED: phases[phase].telemetryCronjobDisabled, // Node NODE_ENV: phases[phase].nodeEnv, NODE_OPTIONS: phases[phase].nodeOptions, @@ -42,8 +46,7 @@ const apiDeploy = async 
(settings) => { BACKBONE_ARTIFACT_INTAKE_PATH: phases[phase].backboneArtifactIntakePath, BIOHUB_TAXON_PATH: phases[phase].biohubTaxonPath, BIOHUB_TAXON_TSN_PATH: phases[phase].biohubTaxonTsnPath, - // BCTW / Critterbase - BCTW_API_HOST: phases[phase].bctwApiHost, + // Critterbase CB_API_HOST: phases[phase].critterbaseApiHost, // S3 S3_KEY_PREFIX: phases[phase].s3KeyPrefix, diff --git a/api/.pipeline/lib/clean.js b/api/.pipeline/lib/clean.js index f76bfe8129..ea5fed7e76 100644 --- a/api/.pipeline/lib/clean.js +++ b/api/.pipeline/lib/clean.js @@ -67,7 +67,7 @@ const clean = (settings) => { namespace: phaseObj.namespace }); - oc.raw('delete', ['all,pvc,secrets,Secrets,secret,configmap,endpoints,Endpoints'], { + oc.raw('delete', ['all,pvc,secrets,Secrets,secret,configmap,endpoints,Endpoints,cronjobs,Cronjobs'], { selector: `app=${phaseObj.instance},env-id=${phaseObj.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, wait: 'true', namespace: phaseObj.namespace diff --git a/api/.pipeline/templates/README.md b/api/.pipeline/templates/README.md index 5053b7343d..7f7f004327 100644 --- a/api/.pipeline/templates/README.md +++ b/api/.pipeline/templates/README.md @@ -10,3 +10,9 @@ The pipeline code builds and deploys all pods/images/storage/etc needed to deplo - Create ObjectStore Secret The included templates under `prereqs` can be imported via the "Import YAML" page in OpenShift. + +## Telemetry Cronjob + +How to manually trigger cronjob? 
+ +- `oc create job --from=cronjob/biohubbc-telemetry-cronjob- ` diff --git a/api/.pipeline/templates/api.dc.yaml b/api/.pipeline/templates/api.dc.yaml index 82dd6713db..58ec2fb439 100644 --- a/api/.pipeline/templates/api.dc.yaml +++ b/api/.pipeline/templates/api.dc.yaml @@ -5,6 +5,12 @@ metadata: labels: build: biohubbc-api parameters: + - name: NAMESPACE + description: Openshift namespace name + value: '' + - name: BASE_IMAGE_REGISTRY_URL + description: The base image registry URL + value: image-registry.openshift-image-registry.svc:5000 - name: NAME value: biohubbc-api - name: SUFFIX @@ -61,13 +67,10 @@ parameters: - name: BIOHUB_TAXON_PATH required: true description: API path for BioHub Platform Backbone taxon endpoint. Example "/api/path/to/taxon". - # BCTW / Critterbase + # Critterbase - name: CB_API_HOST description: API host for the Critterbase service, SIMS API will hit this to retrieve critter metadata. Example "https://critterbase.com". required: true - - name: BCTW_API_HOST - description: API host for the BC Telemetry Warehouse service. SIMS API will hit this for device deployments and other telemetry operations. Example "https://bctw.com". - required: true # Database - name: TZ description: Application timezone @@ -76,6 +79,10 @@ parameters: - name: DB_SERVICE_NAME description: 'Database service name associated with deployment' required: true + - name: DB_PORT + description: 'Database port' + required: true + value: '5432' # Keycloak - name: KEYCLOAK_HOST description: Key clock login url @@ -195,6 +202,22 @@ parameters: value: '1' - name: REPLICAS_MAX value: '1' + # Telemetry + - name: TELEMETRY_CRONJOB_SCHEDULE + description: The schedule for the telemetry cronjob + value: '0 0 * * *' # 12am + - name: TELEMETRY_CRONJOB_DISABLED + description: Boolean flag to disable the cronjob, only static deployments should run on schedule. 
+ value: 'true' + - name: TELEMETRY_SECRET + description: The name of the Openshift Biohubbc telemetry secret + value: biohubbc-telemetry + - name: LOTEK_API_HOST + description: The host URL for Lotek webservice API + value: https://webservice.lotek.com + - name: VECTRONIC_API_HOST + description: The host URL for Vectronic webservice API + value: https://api.vectronic-wildlife.com/v2 objects: - kind: ImageStream apiVersion: image.openshift.io/v1 @@ -283,11 +306,9 @@ objects: value: ${BIOHUB_TAXON_TSN_PATH} - name: BIOHUB_TAXON_PATH value: ${BIOHUB_TAXON_PATH} - # BCTW / Critterbase + # Critterbase - name: CB_API_HOST value: ${CB_API_HOST} - - name: BCTW_API_HOST - value: ${BCTW_API_HOST} # Clamav - name: ENABLE_FILE_VIRUS_SCAN value: ${ENABLE_FILE_VIRUS_SCAN} @@ -316,7 +337,22 @@ objects: key: database-name name: ${DB_SERVICE_NAME} - name: DB_PORT - value: '5432' + value: ${DB_PORT} + # Telemetry + - name: LOTEK_API_HOST + value: ${LOTEK_API_HOST} + - name: LOTEK_ACCOUNT_USERNAME + valueFrom: + secretKeyRef: + key: lotek_account_username + name: ${TELEMETRY_SECRET} + - name: LOTEK_ACCOUNT_PASSWORD + valueFrom: + secretKeyRef: + key: lotek_account_password + name: ${TELEMETRY_SECRET} + - name: VECTRONIC_API_HOST + value: ${VECTRONIC_API_HOST} # Keycloak - name: KEYCLOAK_HOST value: ${KEYCLOAK_HOST} @@ -537,6 +573,96 @@ objects: status: ingress: null + - kind: CronJob + apiVersion: batch/v1 + metadata: + name: biohubbc-telemetry-cronjob${SUFFIX} + labels: + role: telemetry-cronjob + spec: + schedule: ${TELEMETRY_CRONJOB_SCHEDULE} + suspend: ${{TELEMETRY_CRONJOB_DISABLED}} + concurrencyPolicy: 'Forbid' + successfulJobsHistoryLimit: 1 + failedJobsHistoryLimit: 1 + jobTemplate: + spec: + backoffLimit: 0 + template: + spec: + containers: + - name: api + image: ${BASE_IMAGE_REGISTRY_URL}/${NAMESPACE}/${NAME}:${VERSION} + imagePullPolicy: Always + restartPolicy: 'Never' + terminationGracePeriodSeconds: 30 + activeDeadlineSeconds: 220 + env: + - name: NODE_ENV + value: 
${NODE_ENV} + - name: NODE_OPTIONS + value: ${NODE_OPTIONS} + # Database + - name: TZ + value: ${TZ} + - name: DB_HOST + value: ${DB_SERVICE_NAME} + - name: DB_USER_API + valueFrom: + secretKeyRef: + key: database-user-api + name: ${DB_SERVICE_NAME} + - name: DB_USER_API_PASS + valueFrom: + secretKeyRef: + key: database-user-api-password + name: ${DB_SERVICE_NAME} + - name: DB_DATABASE + valueFrom: + secretKeyRef: + key: database-name + name: ${DB_SERVICE_NAME} + - name: DB_PORT + value: ${DB_PORT} + # Telemetry + - name: LOTEK_API_HOST + value: ${LOTEK_API_HOST} + - name: LOTEK_ACCOUNT_USERNAME + valueFrom: + secretKeyRef: + key: lotek_account_username + name: ${TELEMETRY_SECRET} + - name: LOTEK_ACCOUNT_PASSWORD + valueFrom: + secretKeyRef: + key: lotek_account_password + name: ${TELEMETRY_SECRET} + - name: VECTRONIC_API_HOST + value: ${VECTRONIC_API_HOST} + # Logging + - name: LOG_LEVEL + value: ${LOG_LEVEL} + - name: LOG_LEVEL_FILE + value: data/cronjob-logs + - name: LOG_FILE_DIR + value: ${LOG_FILE_DIR} + - name: LOG_FILE_NAME + value: sims-telemetry-cronjob-%DATE%.log + - name: LOG_FILE_DATE_PATTERN + value: ${LOG_FILE_DATE_PATTERN} + - name: LOG_FILE_MAX_SIZE + value: ${LOG_FILE_MAX_SIZE} + - name: LOG_FILE_MAX_FILES + value: ${LOG_FILE_MAX_FILES} + # Api Validation + - name: API_RESPONSE_VALIDATION_ENABLED + value: ${API_RESPONSE_VALIDATION_ENABLED} + - name: DATABASE_RESPONSE_VALIDATION_ENABLED + value: ${DATABASE_RESPONSE_VALIDATION_ENABLED} + command: ["npm", "run", "telemetry-cronjob", "--", "--batchSize 1000", "--concurrently 100"] + restartPolicy: Never + + # Disable the HPA for now, as it is preferrable to run an exact number of pods (e.g. 
min:2, max:2) # - kind: HorizontalPodAutoscaler # apiVersion: autoscaling/v2 diff --git a/api/.pipeline/templates/prereqs/biohubbc-telemetry.yaml b/api/.pipeline/templates/prereqs/biohubbc-telemetry.yaml new file mode 100644 index 0000000000..85e80f199a --- /dev/null +++ b/api/.pipeline/templates/prereqs/biohubbc-telemetry.yaml @@ -0,0 +1,8 @@ +kind: Secret +apiVersion: v1 +metadata: + name: biohubbc-telemetry +data: + lotek_account_username: + lotek_account_password: +type: Opaque diff --git a/api/package-lock.json b/api/package-lock.json index 104e2bf37e..73dfd1105c 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -29,6 +29,7 @@ "fast-deep-equal": "^3.1.3", "fast-json-patch": "^3.1.1", "fast-xml-parser": "^4.2.5", + "fastq": "^1.17.1", "form-data": "^4.0.0", "http-proxy-middleware": "^2.0.6", "jsonpath-plus": "^7.2.0", @@ -4971,7 +4972,6 @@ "version": "1.17.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", - "dev": true, "dependencies": { "reusify": "^1.0.4" } @@ -8782,7 +8782,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" diff --git a/api/package.json b/api/package.json index 463bee9848..8c11407cd2 100644 --- a/api/package.json +++ b/api/package.json @@ -19,7 +19,8 @@ "lint-fix": "eslint . 
--fix --ignore-pattern 'node_modules' --ext .ts", "format": "prettier --loglevel=warn --check \"./src/**/*.{js,jsx,ts,tsx,css,scss}\"", "format-fix": "prettier --loglevel=warn --write \"./src/**/*.{js,jsx,ts,tsx,json,css,scss}\"", - "fix": "npm-run-all -l -s lint-fix format-fix" + "fix": "npm-run-all -l -s lint-fix format-fix", + "telemetry-cronjob": "ts-node src/cronjobs/telemetry/index" }, "engines": { "node": ">= 18.0.0", @@ -46,6 +47,7 @@ "fast-deep-equal": "^3.1.3", "fast-json-patch": "^3.1.1", "fast-xml-parser": "^4.2.5", + "fastq": "^1.17.1", "form-data": "^4.0.0", "http-proxy-middleware": "^2.0.6", "jsonpath-plus": "^7.2.0", diff --git a/api/src/app.ts b/api/src/app.ts index 42a1de6a02..896706fb8f 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -13,12 +13,12 @@ import { } from './middleware/critterbase-proxy'; import { rootAPIDoc } from './openapi/root-api-doc'; import { authenticateRequest, authenticateRequestOptional } from './request-handlers/security/authentication'; -import { loadEvironmentVariables } from './utils/env-config'; +import { loadEnvironmentVariables } from './utils/env-config'; import { scanFileForVirus } from './utils/file-utils'; import { getLogger } from './utils/logger'; // Load and validate the environment variables -loadEvironmentVariables(); +loadEnvironmentVariables(); const defaultLog = getLogger('app'); diff --git a/api/src/cronjobs/telemetry/cronjob.test.ts b/api/src/cronjobs/telemetry/cronjob.test.ts new file mode 100644 index 0000000000..b28eb3ae02 --- /dev/null +++ b/api/src/cronjobs/telemetry/cronjob.test.ts @@ -0,0 +1,97 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../database/db'; +import { TelemetryLotekService } from '../../services/telemetry-services/telemetry-lotek-service'; +import { TelemetryVectronicService } from '../../services/telemetry-services/telemetry-vectronic-service'; +import { getMockDBConnection } from 
'../../__mocks__/db'; +import * as cronjob from './cronjob'; + +chai.use(sinonChai); + +describe('Telemetry Cronjob', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('main', () => { + it('should run successfully and return results', async () => { + sinon.stub(cronjob, 'parseArguments').returns({ + concurrently: 2, + batchSize: 4, + startDate: undefined, + endDate: undefined + }); + + sinon.stub(db, 'initDBPool').returns(undefined); + + const mockConnection = getMockDBConnection({ + open: sinon.stub(), + release: sinon.stub() + }); + + sinon.stub(db, 'getAPIUserDBConnection').returns(mockConnection); + + const lotekFetchStub = sinon.stub(TelemetryLotekService.prototype, 'fetchDevicesFromLotek'); + const vectronicFetchStub = sinon.stub(TelemetryVectronicService.prototype, 'getDeviceCredentials'); + + const lotekProcessStub = sinon.stub(TelemetryLotekService.prototype, 'processTelemetry'); + const vectronicProcessStub = sinon.stub(TelemetryVectronicService.prototype, 'processTelemetry'); + + lotekFetchStub.resolves([{ nDeviceID: 1 }] as any); + vectronicFetchStub.resolves([{ idcollar: 1, collarkey: 'test-collar-key' }] as any); + + lotekProcessStub.resolves([{ task: { serial: 1 }, value: { new: 1, created: 1 } }]); + vectronicProcessStub.resolves([{ task: { serial: 1, key: 'test-collar-key' }, value: { new: 1, created: 1 } }]); + + await cronjob.telemetryCronjob(); + + expect(mockConnection.open).to.have.been.calledOnceWithExactly({ transaction: false }); + + expect(lotekFetchStub).to.have.been.calledOnce; + expect(vectronicFetchStub).to.have.been.calledOnce; + + expect(lotekProcessStub).to.have.been.calledOnceWithExactly([{ serial: 1 }], { + concurrently: 2, + batchSize: 4, + startDate: undefined, + endDate: undefined + }); + + expect(vectronicProcessStub).to.have.been.calledOnceWithExactly([{ serial: 1, key: 'test-collar-key' }], { + concurrently: 2, + batchSize: 4, + startDate: undefined, + endDate: undefined + }); + }); + + it('should always 
release the connection', async () => { + sinon.stub(cronjob, 'parseArguments').returns({ + concurrently: 2, + batchSize: 4, + startDate: undefined, + endDate: undefined + }); + + sinon.stub(db, 'initDBPool').returns(undefined); + + const mockConnection = getMockDBConnection({ + open: sinon.stub(), + release: sinon.stub() + }); + + sinon.stub(db, 'getAPIUserDBConnection').returns(mockConnection); + + sinon.stub(TelemetryLotekService.prototype, 'fetchDevicesFromLotek').rejects('failed'); + + try { + await cronjob.telemetryCronjob(); + expect.fail(); + } catch (err) { + expect(mockConnection.open).to.have.been.calledOnce; + expect(mockConnection.release).to.have.been.calledOnce; + } + }); + }); +}); diff --git a/api/src/cronjobs/telemetry/cronjob.ts b/api/src/cronjobs/telemetry/cronjob.ts new file mode 100644 index 0000000000..cbd102b84b --- /dev/null +++ b/api/src/cronjobs/telemetry/cronjob.ts @@ -0,0 +1,168 @@ +import { parseArgs } from 'util'; +import { z } from 'zod'; +import { defaultPoolConfig, getAPIUserDBConnection, initDBPool } from '../../database/db'; +import { ApiGeneralError } from '../../errors/api-error'; +import { TelemetryLotekService } from '../../services/telemetry-services/telemetry-lotek-service'; +import { TelemetryVectronicService } from '../../services/telemetry-services/telemetry-vectronic-service'; +import { TelemetryProcessingResult } from '../../services/telemetry-services/telemetry.interface'; +import { getLogger } from '../../utils/logger'; +import { QueueResult } from '../../utils/task-queue'; + +const defaultLog = getLogger('telemetry-cronjob'); + +/** + * Telemetry Cronjob: Handles fetching Vectronic and Lotek telemetry and inserting it into the database. 
+ * + * Information: + * + * How to run: + * - Default: `npm run telemetry-cronjob` // defaults to: concurrently = 100 and batchSize = 1000 + * - CLI args: `npm run telemetry-cronjob -- --concurrently 100 --batchSize 1000 --startDate 2021-01-01 --endDate 2021-01-31` + * + * Telemetry device processing flow: + * 1. Fetch the telemetry count from the vendor API. + * 2. Fetch the telemetry count from the SIMS database. + * 3. Compare and check for missing telemetry records. + * 4. Fetch the telemetry data from the vendor API. See `Date ranges` section below. + * 5. Insert the telemetry data into the SIMS database. + * + * Date ranges: + * If a date range is provided, the cronjob will fetch telemetry for that date range. + * If no date range is provided, the cronjob will fetch all telemetry data after the last record in the database. + * Lotek: We find the last record in the database and use the timestamp as the start date. + * Vectronic: We find the largest / max idposition (the Vectronic PK ID) and use the `gt-id` query parameter. + * + * + * Web Services: + * - Lotek: https://webservice.lotek.com/API/Help + * - Vectronic: https://api.vectronic-wildlife.com/swagger-ui/index.html?configUrl=/v3/api-docs/swagger-config# + * + * @returns {*} {Promise} + */ +export async function telemetryCronjob() { + // 0. SETUP - Parse CLI arguments, initialize the database and get a connection + const args = parseArguments(); + defaultLog.info({ message: 'Cronjob starting.', args }); + + initDBPool(defaultPoolConfig); + + const connection = getAPIUserDBConnection(); + + try { + await connection.open({ transaction: false }); // Open a non-transaction database connection + + // 1. INITIALIZE SERVICES - Lotek + Vectronic + defaultLog.info({ message: 'Initializing services.' }); + const vectronicService = new TelemetryVectronicService(connection); + const lotekService = new TelemetryLotekService(connection); + + // 2. 
FETCH DEVICES AND CREDENTIALS - Fetch devices from Lotek and get SIMS Vectronic credentials + defaultLog.info({ message: 'Fetching devices and credentials.' }); + let lotekDevices = await lotekService.fetchDevicesFromLotek(); // Fetch the lotek account devices + let vectronicDevices = await vectronicService.getDeviceCredentials(); // Fetch the vectronic account devices + + // Optional device limit for testing + if (args._test_maxDevices) { + lotekDevices = lotekDevices.slice(0, args._test_maxDevices); + vectronicDevices = vectronicDevices.slice(0, args._test_maxDevices); + } + + // 3. GENERATE QUEUEABLE TASKS - Create tasks for each device + defaultLog.info({ message: 'Generating tasks.' }); + const lotekTasks = lotekDevices.map((device) => ({ serial: device.nDeviceID })); // Create a task for each device + const vectronicTasks = vectronicDevices.map((device) => ({ serial: device.idcollar, key: device.collarkey })); + + // 4. PROCESS TELEMETRY - Fetch telemetry from the vendor API and insert it into the SIMS database + defaultLog.info({ message: 'Processing telemetry.' }); + const lotekResults = await lotekService.processTelemetry(lotekTasks, args); + const vectronicResults = await vectronicService.processTelemetry(vectronicTasks, args); + + // 5. PARSE RESULTS - Parse the telemetry processing results for logging + const parsedLotek = parseResults('Lotek', lotekResults); + const parsedVectronic = parseResults('Vectronic', vectronicResults); + + return { + new: parsedLotek.new + parsedVectronic.new, + created: parsedLotek.created + parsedVectronic.created, + errors: parsedLotek.errors.concat(parsedVectronic.errors) + }; + } finally { + connection.release(); // No commit or rollback is needed (not in a transaction) + } +} + +/** + * Parse the results of the telemetry processing. + * + * @param {string} vendor The vendor name. + * @param {QueueResult<{ serial: number }, TelemetryProcessingResult>[]} results The telemetry processing results. 
+ * @returns {*} The parsed telemetry results. + */ +export const parseResults = (vendor: string, results: QueueResult<{ serial: number }, TelemetryProcessingResult>[]) => { + let newTelemetry = 0; + let createdTelemetry = 0; + const errors = []; + + for (const result of results) { + if (result.error) { + errors.push(result.error); + } + + if (result.value) { + newTelemetry += result.value.new; + createdTelemetry += result.value.created; + } + } + + if (results.length && errors.length === results.length) { + defaultLog.error({ + label: 'Partial Failure', + vendor: vendor, + message: 'Partial failure detected. All resolved results contained a thrown error.', + firstError: errors[0] + }); + + throw new ApiGeneralError(`All tasks failed to complete for ${vendor}.`); + } + + return { + new: newTelemetry, + created: createdTelemetry, + errors + }; +}; + +/** + * Parse the CLI arguments. + * + * @returns {*} The parsed CLI arguments. + */ +export const parseArguments = () => { + const parsedArgs = parseArgs({ + args: process.argv, + options: { + // The number of requests to make concurrently + concurrently: { type: 'string', default: '100' }, + // The number of items to insert in a single batch + batchSize: { type: 'string', default: '1000' }, + // The start date for fetching telemetry data + startDate: { type: 'string' }, + // The end date for fetching telemetry data + endDate: { type: 'string' }, + // The maximum number of devices to process (for testing) + _test_maxDevices: { type: 'string' } + }, + allowPositionals: true + }); + + return z + .object({ + concurrently: z.coerce.number(), + batchSize: z.coerce.number(), + startDate: z.string().optional(), + endDate: z.string().optional(), + _test_maxDevices: z.coerce.number().optional() + }) + .strict() + .parse(parsedArgs.values); +}; diff --git a/api/src/cronjobs/telemetry/index.ts b/api/src/cronjobs/telemetry/index.ts new file mode 100644 index 0000000000..2973cda7f4 --- /dev/null +++ 
b/api/src/cronjobs/telemetry/index.ts @@ -0,0 +1,14 @@ +import { getLogger } from '../../utils/logger'; +import { telemetryCronjob } from './cronjob'; + +const defaultLog = getLogger('telemetry-cronjob'); + +telemetryCronjob() + .then((data) => { + defaultLog.info({ message: 'Cronjob completed.', information: data }); + process.exit(0); + }) + .catch((error) => { + defaultLog.error({ message: 'Cronjob failed.', error }); + process.exit(1); + }); diff --git a/api/src/database-models/README.md b/api/src/database-models/README.md new file mode 100644 index 0000000000..686ccd276d --- /dev/null +++ b/api/src/database-models/README.md @@ -0,0 +1,10 @@ +# Database Models & Records Structure +The files in this directory should only contain the `Data Models` and `Data Records` zod schemas and the equivalent types. + +Note: The file name should be an exact match to what is stored in the database e.g. `survey.ts` + +## Data Models +1 to 1 mapping of the database table. + +## Data Records +1 to 1 mapping of the database table, omitting the audit columns. diff --git a/api/src/database-models/critter.ts b/api/src/database-models/critter.ts new file mode 100644 index 0000000000..4e37b6deee --- /dev/null +++ b/api/src/database-models/critter.ts @@ -0,0 +1,33 @@ +import { z } from 'zod'; +/** + * Critter Model. + * + * @description Data model for `critter`. + */ +export const CritterModel = z.object({ + critter_id: z.number(), + survey_id: z.number(), + critterbase_critter_id: z.string().uuid(), + create_date: z.string(), + create_user: z.number(), + update_date: z.string().nullable(), + update_user: z.number().nullable(), + revision_count: z.number() +}); + +export type CritterModel = z.infer<typeof CritterModel>; + +/** + * Critter Record. + * + * @description Data record for `critter`. 
+ */ +export const CritterRecord = CritterModel.omit({ + create_date: true, + create_user: true, + update_date: true, + update_user: true, + revision_count: true +}); + +export type CritterRecord = z.infer; diff --git a/api/src/database-models/critter_capture_attachment.ts b/api/src/database-models/critter_capture_attachment.ts index 9901c37da6..0e869ef52a 100644 --- a/api/src/database-models/critter_capture_attachment.ts +++ b/api/src/database-models/critter_capture_attachment.ts @@ -1,12 +1,4 @@ import { z } from 'zod'; -/** - * Note: These files should only contain the `Data Models` and `Data Records` with equivalent inferred types. - * - * Data Models contain a 1 to 1 mapping of the database table. - * - * Data Records contain a 1 to 1 mapping of the database table, minus the audit columns. - */ - /** * Critter Capture Attachment Model. * diff --git a/api/src/database-models/critter_mortality_attachment.ts b/api/src/database-models/critter_mortality_attachment.ts index f643cbf272..dac1d3f0f8 100644 --- a/api/src/database-models/critter_mortality_attachment.ts +++ b/api/src/database-models/critter_mortality_attachment.ts @@ -1,12 +1,4 @@ import { z } from 'zod'; -/** - * Note: These files should only contain the `Data Models` and `Data Records` with equivalent inferred types. - * - * Data Models contain a 1 to 1 mapping of the database table. - * - * Data Records contain a 1 to 1 mapping of the database table, minus the audit columns. - */ - /** * Critter Mortality Attachment Model. * diff --git a/api/src/database-models/deployment.ts b/api/src/database-models/deployment.ts new file mode 100644 index 0000000000..245b0adf6a --- /dev/null +++ b/api/src/database-models/deployment.ts @@ -0,0 +1,47 @@ +import { z } from 'zod'; + +/** + * Deployment Model. + * + * @description Data model for `deployment`. 
+ */
+export const DeploymentModel = z.object({
+  deployment_id: z.number(),
+  survey_id: z.number(),
+  critter_id: z.number(),
+  device_id: z.number(),
+  device_key: z.string(),
+  frequency: z.number().nullable(),
+  frequency_unit_id: z.number().nullable(),
+  attachment_start_date: z.string(),
+  attachment_start_time: z.string().nullable(),
+  attachment_start_timestamp: z.string(),
+  attachment_end_date: z.string().nullable(),
+  attachment_end_time: z.string().nullable(),
+  attachment_end_timestamp: z.string().nullable(),
+  critterbase_start_capture_id: z.string().uuid().nullable(),
+  critterbase_end_capture_id: z.string().uuid().nullable(),
+  critterbase_end_mortality_id: z.string().uuid().nullable(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type DeploymentModel = z.infer<typeof DeploymentModel>;
+
+/**
+ * Deployment Record.
+ *
+ * @description Data record for `deployment`.
+ */
+export const DeploymentRecord = DeploymentModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type DeploymentRecord = z.infer<typeof DeploymentRecord>;
diff --git a/api/src/database-models/device.ts b/api/src/database-models/device.ts
new file mode 100644
index 0000000000..1c719ac03f
--- /dev/null
+++ b/api/src/database-models/device.ts
@@ -0,0 +1,37 @@
+import { z } from 'zod';
+/**
+ * Device Model.
+ *
+ * @description Data model for `device`.
+ */
+export const DeviceModel = z.object({
+  device_id: z.number(),
+  survey_id: z.number(),
+  device_key: z.string(),
+  serial: z.string(),
+  device_make_id: z.number(),
+  model: z.string().nullable(),
+  comment: z.string().nullable(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type DeviceModel = z.infer<typeof DeviceModel>;
+
+/**
+ * Device Record.
+ *
+ * @description Data record for `device`.
+ */
+export const DeviceRecord = DeviceModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type DeviceRecord = z.infer<typeof DeviceRecord>;
diff --git a/api/src/database-models/device_make.ts b/api/src/database-models/device_make.ts
new file mode 100644
index 0000000000..aa4cb3f40b
--- /dev/null
+++ b/api/src/database-models/device_make.ts
@@ -0,0 +1,38 @@
+import { z } from 'zod';
+/**
+ * Device Make Model.
+ *
+ * @description Data model for `device_make`.
+ */
+export const DeviceMakeModel = z.object({
+  device_make_id: z.number(),
+  name: z.string(),
+  description: z.string().nullable(),
+  notes: z.string().nullable(),
+  record_effective_date: z.string().nullable(),
+  record_end_date: z.string().nullable(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type DeviceMakeModel = z.infer<typeof DeviceMakeModel>;
+
+/**
+ * Device Make Record.
+ *
+ * @description Data record for `device_make`.
+ */
+export const DeviceMakeRecord = DeviceMakeModel.omit({
+  record_effective_date: true,
+  record_end_date: true,
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type DeviceMakeRecord = z.infer<typeof DeviceMakeRecord>;
diff --git a/api/src/database-models/telemetry_credential_lotek.ts b/api/src/database-models/telemetry_credential_lotek.ts
new file mode 100644
index 0000000000..27cbbe511b
--- /dev/null
+++ b/api/src/database-models/telemetry_credential_lotek.ts
@@ -0,0 +1,39 @@
+import { z } from 'zod';
+
+/**
+ * Telemetry Credential Lotek Model.
+ *
+ * @description Data model for `telemetry_credential_lotek`.
+ */
+export const TelemetryCredentialLotekModel = z.object({
+  telemetry_credential_lotek_id: z.number(),
+  device_key: z.string(),
+  ndeviceid: z.number(),
+  strspecialid: z.string().nullable(),
+  dtcreated: z.string().nullable(),
+  strsatellite: z.string().nullable(),
+  verified_date: z.string().nullable(),
+  is_valid: z.boolean().nullable(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type TelemetryLotekCredentialModel = z.infer<typeof TelemetryCredentialLotekModel>;
+
+/**
+ * Telemetry Lotek Credential Record.
+ *
+ * @description Data record for `telemetry_credential_lotek`.
+ */
+export const TelemetryLotekCredentialRecord = TelemetryCredentialLotekModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type TelemetryLotekCredentialRecord = z.infer<typeof TelemetryLotekCredentialRecord>;
diff --git a/api/src/database-models/telemetry_credential_vectronic.ts b/api/src/database-models/telemetry_credential_vectronic.ts
new file mode 100644
index 0000000000..f37c214957
--- /dev/null
+++ b/api/src/database-models/telemetry_credential_vectronic.ts
@@ -0,0 +1,38 @@
+import { z } from 'zod';
+
+/**
+ * Telemetry Credential Vectronic Model.
+ *
+ * @description Data model for `telemetry_credential_vectronic`.
+ */
+export const TelemetryCredentialVectronicModel = z.object({
+  telemetry_credential_vectronic_id: z.number(),
+  device_key: z.string(),
+  idcollar: z.number(),
+  comtype: z.string(),
+  idcom: z.number(),
+  collarkey: z.string(),
+  collartype: z.number(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type TelemetryCredentialVectronicModel = z.infer<typeof TelemetryCredentialVectronicModel>;
+
+/**
+ * Telemetry Credential Vectronic Record.
+ *
+ * @description Data record for `telemetry_credential_vectronic`.
+ */
+export const TelemetryCredentialVectronicRecord = TelemetryCredentialVectronicModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type TelemetryCredentialVectronicRecord = z.infer<typeof TelemetryCredentialVectronicRecord>;
diff --git a/api/src/database-models/telemetry_lotek.ts b/api/src/database-models/telemetry_lotek.ts
new file mode 100644
index 0000000000..df4076b4bb
--- /dev/null
+++ b/api/src/database-models/telemetry_lotek.ts
@@ -0,0 +1,58 @@
+import { z } from 'zod';
+
+/**
+ * Telemetry Lotek Model.
+ *
+ * @description Data model for `telemetry_lotek`.
+ */
+export const TelemetryLotekModel = z.object({
+  telemetry_lotek_id: z.string().uuid(),
+  device_key: z.string(),
+
+  recdatetime: z.string().nullable(), // Timestamp the telemetry was recorded - use this
+  uploadtimestamp: z.string().nullable(), // Timestamp telemetry was uploaded - ignore this
+  channelstatus: z.string().nullable(),
+  latitude: z.number().nullable(),
+  longitude: z.number().nullable(),
+  altitude: z.number().nullable(),
+  ecefx: z.number().nullable(),
+  ecefy: z.number().nullable(),
+  ecefz: z.number().nullable(),
+  rxstatus: z.number().nullable(),
+  pdop: z.number().nullable(),
+  mainv: z.number().nullable(),
+  bkupv: z.number().nullable(),
+  temperature: z.number().nullable(),
+  fixduration: z.number().nullable(),
+  bhastempvoltage: z.boolean().nullable(),
+  devname: z.string().nullable(),
+  deltatime: z.number().nullable(),
+  fixtype: z.number().nullable(),
+  cepradius: z.number().nullable(),
+  crc: z.number().nullable(),
+  deviceid: z.number().nullable(),
+  geography: z.string().nullable(),
+
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type TelemetryLotekModel = z.infer<typeof TelemetryLotekModel>;
+
+/**
+ * Telemetry Lotek Record.
+ *
+ * @description Data record for `telemetry_lotek`.
+ */
+export const TelemetryLotekRecord = TelemetryLotekModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type TelemetryLotekRecord = z.infer<typeof TelemetryLotekRecord>;
diff --git a/api/src/database-models/telemetry_manual.ts b/api/src/database-models/telemetry_manual.ts
new file mode 100644
index 0000000000..b8542778d3
--- /dev/null
+++ b/api/src/database-models/telemetry_manual.ts
@@ -0,0 +1,36 @@
+import { z } from 'zod';
+/**
+ * Telemetry Manual Model.
+ *
+ * @description Data model for `telemetry_manual`.
+ */
+export const TelemetryManualModel = z.object({
+  telemetry_manual_id: z.string().uuid(),
+  deployment_id: z.number(),
+  latitude: z.number(),
+  longitude: z.number(),
+  acquisition_date: z.string(),
+  transmission_date: z.string().nullable(),
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type TelemetryManualModel = z.infer<typeof TelemetryManualModel>;
+
+/**
+ * Telemetry Manual Record.
+ *
+ * @description Data record for `telemetry_manual`.
+ */
+export const TelemetryManualRecord = TelemetryManualModel.omit({
+  create_date: true,
+  create_user: true,
+  update_date: true,
+  update_user: true,
+  revision_count: true
+});
+
+export type TelemetryManualRecord = z.infer<typeof TelemetryManualRecord>;
diff --git a/api/src/database-models/telemetry_vectronic.ts b/api/src/database-models/telemetry_vectronic.ts
new file mode 100644
index 0000000000..46f46e52c5
--- /dev/null
+++ b/api/src/database-models/telemetry_vectronic.ts
@@ -0,0 +1,83 @@
+import { z } from 'zod';
+
+/**
+ * Telemetry Vectronic Model.
+ *
+ * @description Data model for `telemetry_vectronic`.
+ */
+export const TelemetryVectronicModel = z.object({
+  telemetry_vectronic_id: z.string().uuid(),
+  device_key: z.string(),
+  idposition: z.number(),
+  idcollar: z.number(),
+  acquisitiontime: z.string().nullable(),
+  scts: z.string().nullable(),
+  origincode: z.string().nullable(),
+  ecefx: z.number().nullable(),
+  ecefy: z.number().nullable(),
+  ecefz: z.number().nullable(),
+  latitude: z.number().nullable(),
+  longitude: z.number().nullable(),
+  height: z.number().nullable(),
+  dop: z.number().nullable(),
+  idfixtype: z.number().nullable(),
+  positionerror: z.number().nullable(),
+  satcount: z.number().nullable(),
+
+  ch01satid: z.number().nullable(),
+  ch01satcnr: z.number().nullable(),
+  ch02satid: z.number().nullable(),
+  ch02satcnr: z.number().nullable(),
+  ch03satid: z.number().nullable(),
+  ch03satcnr: z.number().nullable(),
+  ch04satid: z.number().nullable(),
+  ch04satcnr: z.number().nullable(),
+  ch05satid: z.number().nullable(),
+  ch05satcnr: z.number().nullable(),
+  ch06satid: z.number().nullable(),
+  ch06satcnr: z.number().nullable(),
+  ch07satid: z.number().nullable(),
+  ch07satcnr: z.number().nullable(),
+  ch08satid: z.number().nullable(),
+  ch08satcnr: z.number().nullable(),
+  ch09satid: z.number().nullable(),
+  ch09satcnr: z.number().nullable(),
+  ch10satid: z.number().nullable(),
+  ch10satcnr: z.number().nullable(),
+  ch11satid: z.number().nullable(),
+  ch11satcnr: z.number().nullable(),
+  ch12satid: z.number().nullable(),
+  ch12satcnr: z.number().nullable(),
+
+  idmortalitystatus: z.number().nullable(),
+  activity: z.number().nullable(),
+  mainvoltage: z.number().nullable(),
+  backupvoltage: z.number().nullable(),
+  temperature: z.number().nullable(),
+  transformedx: z.number().nullable(),
+  transformedy: z.number().nullable(),
+  geography: z.string().nullable(),
+
+  create_date: z.string(),
+  create_user: z.number(),
+  update_date: z.string().nullable(),
+  update_user: z.number().nullable(),
+  revision_count: z.number()
+});
+
+export type TelemetryVectronicModel = z.infer; + +/** + * Telemetry Vectronic Record. + * + * @description Data record for `telemetry_vectronic`. + */ +export const TelemetryVectronicRecord = TelemetryVectronicModel.omit({ + create_date: true, + create_user: true, + update_date: true, + update_user: true, + revision_count: true +}); + +export type TelemetryVectronicRecord = z.infer; diff --git a/api/src/database/db-utils.ts b/api/src/database/db-utils.ts index 687adae674..8dadcc3ffc 100644 --- a/api/src/database/db-utils.ts +++ b/api/src/database/db-utils.ts @@ -81,6 +81,14 @@ const parseError = (error: any) => { // error thrown by DB when query fails due to foreign key constraint throw new ApiExecuteSQLError('Failed to delete record due to foreign key constraint', [error]); } + + if (error.constraint === 'check_no_device_attachment_date_overlap') { + // error thrown by DB when constraint 'check_no_device_attachment_date_overlap' fails + throw new ApiExecuteSQLError( + 'This device is already being used in another deployment, and the dates overlap. Please update the conflicting deployment or adjust the deployment dates.', + [error] + ); + } } // Generic error thrown if not captured above diff --git a/api/src/database/db.test.ts b/api/src/database/db.test.ts index 8121b65403..1c1188ae51 100644 --- a/api/src/database/db.test.ts +++ b/api/src/database/db.test.ts @@ -32,7 +32,7 @@ describe('db', () => { }); it('returns a defined database pool instance if it has been initialized', () => { - initDBPool(); + initDBPool({}); const pool = getDBPool(); diff --git a/api/src/database/db.ts b/api/src/database/db.ts index 8d598f1981..f58f474ab6 100644 --- a/api/src/database/db.ts +++ b/api/src/database/db.ts @@ -72,15 +72,15 @@ let DBPool: pg.Pool | undefined; * If the pool cannot be created successfully, `process.exit(1)` is called to terminate the API. * Why? The API is of no use if the database can't be reached. 
* - * @param {pg.PoolConfig} [poolConfig] + * @param {pg.PoolConfig} poolConfig */ -export const initDBPool = function (poolConfig?: pg.PoolConfig): void { +export const initDBPool = function (poolConfig: pg.PoolConfig): void { if (DBPool) { // the pool has already been initialized, do nothing return; } - defaultLog.debug({ label: 'create db pool', message: 'pool config', poolConfig }); + defaultLog.debug({ label: 'create db pool', message: 'pool config', poolConfig: { ...poolConfig, password: '***' } }); try { DBPool = new pg.Pool(poolConfig); @@ -114,11 +114,13 @@ export interface IDBConnection { /** * Opens a new connection, begins a transaction, and sets the user context. * + * Note: Transaction bypassed if `config.transaction` is `false`. * Note: Does nothing if the connection is already open. * + * @param {{transaction: boolean}} [config] Optional configuration object (contains transaction flag) * @memberof IDBConnection */ - open: () => Promise; + open: (config?: { transaction: boolean }) => Promise; /** * Releases (closes) the connection. * @@ -232,6 +234,7 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation let _isOpen = false; let _isReleased = false; let _systemUserId: number | null = null; + let _isTransaction = false; const _token = keycloakToken; /** @@ -252,11 +255,13 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation /** * Opens a new connection, begins a transaction, and sets the user context. * + * Note: Transaction bypassed if `config.transaction` is `false`. * Note: Does nothing if the connection is already open. 
* + * @param {{transaction: boolean}} config Configuration object (contains transaction flag) * @throws {Error} if called when the DBPool has not been initialized via `initDBPool` */ - const _open = async () => { + const _open = async (config = { transaction: true }) => { if (_client || _isOpen) { return; } @@ -270,9 +275,13 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation _client = await pool.connect(); _isOpen = true; _isReleased = false; + _isTransaction = config.transaction; await _setUserContext(); - await _client.query('BEGIN'); + + if (config.transaction) { + await _client.query('BEGIN'); + } }; /** @@ -292,6 +301,7 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation _client.release(); _isOpen = false; _isReleased = true; + _isTransaction = false; }; /** @@ -304,6 +314,10 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation throw Error('DBConnection is not open'); } + if (!_isTransaction) { + throw Error('DBConnection is not a transaction'); + } + await _client.query('COMMIT'); }; @@ -317,6 +331,10 @@ export const getDBConnection = function (keycloakToken?: KeycloakUserInformation throw Error('DBConnection is not open'); } + if (!_isTransaction) { + throw Error('DBConnection is not a transaction'); + } + await _client.query('ROLLBACK'); }; diff --git a/api/src/errors/axios-error.ts b/api/src/errors/axios-error.ts new file mode 100644 index 0000000000..a1601a3e14 --- /dev/null +++ b/api/src/errors/axios-error.ts @@ -0,0 +1,39 @@ +import axios from 'axios'; + +interface FormattedAxiosError extends Error { + /** + * The HTTP status code of the response. + * @type {number} + */ + status: number; + /** + * The status text of the response. + * @type {string} + */ + statusText: string; + /** + * The Axios error response. + * @type {unknown} + */ + response: unknown; +} + +/** + * Attempts to format an Axios error into a simplified object. 
+ * + * @param {unknown} error - The error to format + * @returns {*} {FormattedAxiosError} The formatted error + */ +export const formatAxiosError = (error: unknown): FormattedAxiosError => { + if (axios.isAxiosError(error)) { + return { + name: 'AxiosError', + message: error.message, + status: error.response?.status ?? 500, + statusText: error.response?.statusText ?? 'Internal Server Error', + response: error.response?.data + }; + } + + return error as FormattedAxiosError; +}; diff --git a/api/src/models/animal-view.ts b/api/src/models/animal-view.ts index c15af8b4e5..670f3f2dc3 100644 --- a/api/src/models/animal-view.ts +++ b/api/src/models/animal-view.ts @@ -21,7 +21,9 @@ export interface IAnimalAdvancedFilters { */ itis_tsn?: number; /** - * Filter results by system user id (not necessarily the user making the request). + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. * * @type {number} * @memberof IAnimalAdvancedFilters diff --git a/api/src/models/bctw.ts b/api/src/models/bctw.ts deleted file mode 100644 index 2c7931f4cf..0000000000 --- a/api/src/models/bctw.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { z } from 'zod'; - -export const BctwDeployDevice = z.object({ - device_id: z.number(), - frequency: z.number().optional(), - frequency_unit: z.string().optional(), - device_make: z.string().optional(), - device_model: z.string().optional(), - attachment_start: z.string(), - attachment_end: z.string().nullable(), - critter_id: z.string(), - critterbase_start_capture_id: z.string().uuid(), - critterbase_end_capture_id: z.string().uuid().nullable(), - critterbase_end_mortality_id: z.string().uuid().nullable() -}); - -export type BctwDeployDevice = z.infer; - -export type BctwDevice = Omit & { - collar_id: string; -}; - -export const BctwDeploymentUpdate = z.object({ - deployment_id: z.string(), - attachment_start: z.string(), - attachment_end: z.string() -}); - -export type BctwDeploymentUpdate = z.infer; - 
-export const BctwUploadKeyxResponse = z.object({ - errors: z.array( - z.object({ - row: z.string(), - error: z.string(), - rownum: z.number() - }) - ), - results: z.array( - z.object({ - idcollar: z.number(), - comtype: z.string(), - idcom: z.string(), - collarkey: z.string(), - collartype: z.number(), - dtlast_fetch: z.string().nullable() - }) - ) -}); - -export type BctwUploadKeyxResponse = z.infer; - -export const BctwKeyXDetails = z.object({ - device_id: z.number(), - keyx: z - .object({ - idcom: z.string(), - comtype: z.string(), - idcollar: z.number(), - collarkey: z.string(), - collartype: z.number() - }) - .nullable() -}); - -export type BctwKeyXDetails = z.infer; - -export const IManualTelemetry = z.object({ - telemetry_manual_id: z.string().uuid(), - deployment_id: z.string().uuid(), - latitude: z.number(), - longitude: z.number(), - date: z.string() -}); - -export type IManualTelemetry = z.infer; - -export const BctwUser = z.object({ - keycloak_guid: z.string(), - username: z.string() -}); - -export interface ICodeResponse { - code_header_title: string; - code_header_name: string; - id: number; - code: string; - description: string; - long_description: string; -} - -export type BctwUser = z.infer; - -export interface ICreateManualTelemetry { - deployment_id: string; - latitude: number; - longitude: number; - acquisition_date: string; -} diff --git a/api/src/models/deployment-view.ts b/api/src/models/deployment-view.ts new file mode 100644 index 0000000000..562a53cc1d --- /dev/null +++ b/api/src/models/deployment-view.ts @@ -0,0 +1,25 @@ +export interface IDeploymentAdvancedFilters { + /** + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. + * + * @type {number} + * @memberof IAnimalAdvancedFilters + */ + system_user_id?: number; + /** + * Filter results by deployment ids. 
+ * + * @type {number[]} + * @memberof IDeploymentAdvancedFilters + */ + deployment_ids?: number[]; + /** + * Filter results by survey ids. + * + * @type {number[]} + * @memberof IAnimalAdvancedFilters + */ + survey_ids?: number[]; +} diff --git a/api/src/models/observation-view.ts b/api/src/models/observation-view.ts index 44fac03d3b..d90e6380a0 100644 --- a/api/src/models/observation-view.ts +++ b/api/src/models/observation-view.ts @@ -1,11 +1,61 @@ export interface IObservationAdvancedFilters { keyword?: string; + /** + * Filter results by ITIS TSNs. + * + * @type {number[]} + * @memberof IObservationAdvancedFilters + */ itis_tsns?: number[]; + /** + * Filter results by ITIS TSN. + * + * @type {number} + * @memberof IObservationAdvancedFilters + */ itis_tsn?: number; + /** + * Filter results by start date. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ start_date?: string; + /** + * Filter results by end date. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ end_date?: string; + /** + * Filter results by start time. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ start_time?: string; + /** + * Filter results by end time. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ end_time?: string; + /** + * Filter results by minimum count. + * + * @type {number} + * @memberof IObservationAdvancedFilters + */ min_count?: number; + /** + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. 
+ * + * @type {number} + * @memberof IObservationAdvancedFilters + */ system_user_id?: number; } diff --git a/api/src/models/project-view.ts b/api/src/models/project-view.ts index b8aca002c0..8a885eebbb 100644 --- a/api/src/models/project-view.ts +++ b/api/src/models/project-view.ts @@ -3,10 +3,42 @@ import { ProjectUser } from '../repositories/project-participation-repository'; import { SystemUser } from '../repositories/user-repository'; export interface IProjectAdvancedFilters { + /** + * Filter results by keyword. + * + * @type {string} + * @memberof IProjectAdvancedFilters + */ keyword?: string; + /** + * Filter results by ITIS TSN. + * + * @type {number} + * @memberof IProjectAdvancedFilters + */ itis_tsn?: number; + /** + * Filter results by ITIS TSNs + * + * @type {number[]} + * @memberof IProjectAdvancedFilters + */ itis_tsns?: number[]; + /** + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. + * + * @type {number} + * @memberof IProjectAdvancedFilters + */ system_user_id?: number; + /** + * Filter results by project name. 
+ * + * @type {string} + * @memberof IProjectAdvancedFilters + */ project_name?: string; } diff --git a/api/src/models/survey-deployment.ts b/api/src/models/survey-deployment.ts deleted file mode 100644 index 68d6d0841e..0000000000 --- a/api/src/models/survey-deployment.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { z } from 'zod'; - -export const SurveyDeployment = z.object({ - deployment_id: z.number().int(), - critter_id: z.number(), - critterbase_critter_id: z.string().optional(), - bctw_deployment_id: z.string().uuid(), - critterbase_start_capture_id: z.string().uuid().nullable(), - critterbase_end_capture_id: z.string().uuid().nullable(), - critterbase_end_mortality_id: z.string().uuid().nullable() -}); - -export type SurveyDeployment = z.infer; - -export interface ICreateSurveyDeployment extends Omit {} - -export interface IUpdateSurveyDeployment - extends Omit {} diff --git a/api/src/models/survey-view.ts b/api/src/models/survey-view.ts index 04148517e0..5f298a3461 100644 --- a/api/src/models/survey-view.ts +++ b/api/src/models/survey-view.ts @@ -10,12 +10,56 @@ import { SystemUser } from '../repositories/user-repository'; import { ITaxonomyWithEcologicalUnits } from '../services/platform-service'; export interface ISurveyAdvancedFilters { + /** + * Filter results by keyword. + * + * @type {string} + * @memberof ISurveyAdvancedFilters + */ keyword?: string; + /** + * Filter results by ITIS TSN. + * + * @type {number} + * @memberof ISurveyAdvancedFilters + */ itis_tsn?: number; + /** + * Filter results by ITIS TSNs. + * + * @type {number[]} + * @memberof ISurveyAdvancedFilters + */ itis_tsns?: number[]; + /** + * Filter results by start date. + * + * @type {string} + * @memberof ISurveyAdvancedFilters + */ start_date?: string; + /** + * Filter results by end date. + * + * @type {string} + * @memberof ISurveyAdvancedFilters + */ end_date?: string; + /** + * Filter results by survey name. 
+ * + * @type {string} + * @memberof ISurveyAdvancedFilters + */ survey_name?: string; + /** + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. + * + * @type {number} + * @memberof ISurveyAdvancedFilters + */ system_user_id?: number; } diff --git a/api/src/models/telemetry-view.ts b/api/src/models/telemetry-view.ts index 67a693dff7..81ee157c96 100644 --- a/api/src/models/telemetry-view.ts +++ b/api/src/models/telemetry-view.ts @@ -21,14 +21,30 @@ export interface IAllTelemetryAdvancedFilters { */ itis_tsn?: number; /** - * Filter results by system user id (not necessarily the user making the request). + * Filter results by start date. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ + start_date?: string; + /** + * Filter results by end date. + * + * @type {string} + * @memberof IObservationAdvancedFilters + */ + end_date?: string; + /** + * Filter results by system user id. + * + * Note: This is not the id of the user making the request. * * @type {number} * @memberof IAnimalAdvancedFilters */ system_user_id?: number; /** - * Filter results by survey ids + * Filter results by survey ids. 
* * @type {number[]} * @memberof IAnimalAdvancedFilters diff --git a/api/src/openapi/schemas/deployment.ts b/api/src/openapi/schemas/deployment.ts index 3af2254591..2c3c4afb08 100644 --- a/api/src/openapi/schemas/deployment.ts +++ b/api/src/openapi/schemas/deployment.ts @@ -1,115 +1,6 @@ import { OpenAPIV3 } from 'openapi-types'; import { GeoJSONFeatureCollection } from './geoJson'; -export const getDeploymentSchema: OpenAPIV3.SchemaObject = { - title: 'Deployment', - type: 'object', - // TODO: REMOVE unnecessary columns from BCTW response - additionalProperties: false, - required: [ - // BCTW properties - 'assignment_id', - 'collar_id', - 'attachment_start_date', - 'attachment_start_time', - 'attachment_end_date', - 'attachment_end_time', - 'bctw_deployment_id', - 'device_id', - 'device_make', - 'device_model', - 'frequency', - 'frequency_unit', - // SIMS properties - 'deployment_id', - 'critter_id', - 'critterbase_critter_id', - 'critterbase_start_capture_id', - 'critterbase_end_capture_id', - 'critterbase_end_mortality_id' - ], - properties: { - // BCTW properties - assignment_id: { - type: 'string', - format: 'uuid' - }, - collar_id: { - type: 'string', - description: 'Id of the collar in BCTW' - }, - attachment_start_date: { - type: 'string', - description: 'start date of the deployment.' - }, - attachment_start_time: { - type: 'string', - description: 'start time of the deployment.' - }, - attachment_end_date: { - type: 'string', - description: 'End date of the deployment.', - nullable: true - }, - attachment_end_time: { - type: 'string', - description: 'End time of the deployment.', - nullable: true - }, - bctw_deployment_id: { - type: 'string', - format: 'uuid', - description: 'Id of the deployment in BCTW. May match multiple records in BCTW' - }, - device_id: { - type: 'integer', - description: 'Id of the device, as reported by users. Not unique.' 
- }, - device_make: { - type: 'number', - nullable: true - }, - device_model: { - type: 'string', - nullable: true - }, - frequency: { - type: 'number', - nullable: true - }, - frequency_unit: { - type: 'number', - nullable: true - }, - // SIMS properties - deployment_id: { - type: 'integer', - description: 'Id of the deployment in the Survey.' - }, - critter_id: { - type: 'integer', - minimum: 1, - description: 'Id of the critter in the Survey' - }, - critterbase_critter_id: { - type: 'string', - format: 'uuid', - description: 'Id of the critter in Critterbase.' - }, - critterbase_start_capture_id: { - type: 'string' - }, - critterbase_end_capture_id: { - type: 'string', - nullable: true - }, - critterbase_end_mortality_id: { - type: 'string', - nullable: true - } - } -}; - const GeoJSONFeatureCollectionFeaturesItems = ( GeoJSONFeatureCollection.properties?.features as OpenAPIV3.ArraySchemaObject )?.items as OpenAPIV3.SchemaObject; diff --git a/api/src/openapi/schemas/telemetry.ts b/api/src/openapi/schemas/telemetry.ts index 3de5f7081f..4c5f6a29e5 100644 --- a/api/src/openapi/schemas/telemetry.ts +++ b/api/src/openapi/schemas/telemetry.ts @@ -1,5 +1,77 @@ import { OpenAPIV3 } from 'openapi-types'; +/** + * Normalized Telemetry Schema + * + * @see telemetry-vendor-repository.interface.ts + */ +export const TelemetrySchema: OpenAPIV3.SchemaObject = { + type: 'object', + additionalProperties: false, + required: [ + 'telemetry_id', + 'deployment_id', + 'critter_id', + 'vendor', + 'serial', + 'acquisition_date', + 'latitude', + 'longitude', + 'elevation', + 'temperature' + ], + properties: { + telemetry_id: { + type: 'string', + format: 'uuid', + description: 'The unique identifier for the telemetry point.' + }, + deployment_id: { + type: 'integer', + description: 'The unique identifier for the deployment that the telemetry point is associated with.' 
+ }, + critter_id: { + type: 'integer', + description: 'The unique identifier for the critter that the telemetry point is associated with.' + }, + vendor: { + type: 'string', + description: 'The vendor of the telemetry point.' + }, + serial: { + type: 'string', + description: 'The serial number of the telemetry device.' + }, + acquisition_date: { + type: 'string', + description: 'The date the telemetry point was acquired.' + }, + latitude: { + type: 'number', + description: 'The latitude of the telemetry point.', + nullable: true + }, + longitude: { + type: 'number', + description: 'The longitude of the telemetry point.', + nullable: true + }, + elevation: { + type: 'number', + description: 'The elevation of the telemetry point.', + nullable: true + }, + temperature: { + type: 'number', + description: 'The temperature of the telemetry point.', + nullable: true + } + } +}; + +/** + * @deprecated Use TelemetrySchema instead + */ export const AllTelemetrySchema: OpenAPIV3.SchemaObject = { type: 'object', additionalProperties: false, diff --git a/api/src/paths/analytics/observations.ts b/api/src/paths/analytics/observations.ts index 65cbc6265e..2bd408f453 100644 --- a/api/src/paths/analytics/observations.ts +++ b/api/src/paths/analytics/observations.ts @@ -22,11 +22,7 @@ export const GET: Operation = [ discriminator: 'ProjectPermission' }, { - validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], - discriminator: 'SystemRole' - }, - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN], + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.DATA_ADMINISTRATOR], discriminator: 'SystemRole' } ] diff --git a/api/src/paths/codes.ts b/api/src/paths/codes.ts index ebbf556be8..f3466c9b56 100644 --- a/api/src/paths/codes.ts +++ b/api/src/paths/codes.ts @@ -34,10 +34,16 @@ GET.apiDoc = { 'project_roles', 'administrative_activity_status_type', 'intended_outcomes', + 'survey_jobs', 'site_selection_strategies', + 'sample_methods', 'survey_progress', 'method_response_metrics', 
'attractants', + 'observation_subcount_signs', + 'telemetry_device_makes', + 'frequency_units', + 'alert_types', 'vantages' ], properties: { @@ -384,6 +390,48 @@ GET.apiDoc = { } } }, + telemetry_device_makes: { + type: 'array', + description: 'Active telemetry device manufacturers / makes / vendors.', + items: { + type: 'object', + additionalProperties: false, + required: ['id', 'name', 'description'], + properties: { + id: { + type: 'integer', + minimum: 1 + }, + name: { + type: 'string' + }, + description: { + type: 'string' + } + } + } + }, + frequency_units: { + type: 'array', + description: 'Frequency unit types.', + items: { + type: 'object', + additionalProperties: false, + required: ['id', 'name'], + properties: { + id: { + type: 'integer', + minimum: 1 + }, + name: { + type: 'string' + }, + description: { + type: 'string' + } + } + } + }, alert_types: { type: 'array', description: 'Alert type options for system administrators managing alert messages.', diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.test.ts index d0db0b5aa2..a651ac688e 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.test.ts @@ -7,7 +7,6 @@ import * as db from '../../../../../../../database/db'; import { HTTPError } from '../../../../../../../errors/http-error'; import { SurveyTelemetryCredentialAttachment } from '../../../../../../../repositories/attachment-repository'; import { AttachmentService } from '../../../../../../../services/attachment-service'; -import { BctwKeyxService } from '../../../../../../../services/bctw-service/bctw-keyx-service'; import * as file_utils from '../../../../../../../utils/file-utils'; import { KeycloakUserInformation } from '../../../../../../../utils/keycloak-utils'; import { 
getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; @@ -53,7 +52,7 @@ describe('postSurveyTelemetryCredentialAttachment', () => { } }); - it('succeeds and uploads a KeyX file to BCTW', async () => { + it('successfully imports a credential file', async () => { const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); @@ -63,47 +62,6 @@ describe('postSurveyTelemetryCredentialAttachment', () => { const uploadFileToS3Stub = sinon.stub(file_utils, 'uploadFileToS3').resolves(); - const uploadKeyXStub = sinon.stub(BctwKeyxService.prototype, 'uploadKeyX').resolves(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.keycloak_token = {} as KeycloakUserInformation; - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.keyx', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as Express.Multer.File[]; - - const requestHandler = postSurveyTelemetryCredentialAttachment(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql({ survey_telemetry_credential_attachment_id: 44 }); - expect(upsertSurveyTelemetryCredentialAttachmentStub).to.be.calledOnce; - expect(uploadKeyXStub).to.be.calledOnce; - expect(uploadFileToS3Stub).to.be.calledOnce; - }); - - it('succeeds and does not upload a Cfg file to BCTW', async () => { - const dbConnectionObj = getMockDBConnection(); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - const upsertSurveyTelemetryCredentialAttachmentStub = sinon - .stub(AttachmentService.prototype, 'upsertSurveyTelemetryCredentialAttachment') - .resolves({ survey_telemetry_credential_attachment_id: 44, key: 'path/to/file/test.keyx' }); - - const uploadFileToS3Stub = sinon.stub(file_utils, 'uploadFileToS3').resolves(); - - const uploadKeyXStub = sinon.stub(BctwKeyxService.prototype, 'uploadKeyX').resolves(); - const { 
mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.keycloak_token = {} as KeycloakUserInformation; @@ -127,7 +85,6 @@ describe('postSurveyTelemetryCredentialAttachment', () => { expect(mockRes.jsonValue).to.eql({ survey_telemetry_credential_attachment_id: 44 }); expect(upsertSurveyTelemetryCredentialAttachmentStub).to.be.calledOnce; - expect(uploadKeyXStub).not.to.be.called; // not called expect(uploadFileToS3Stub).to.be.calledOnce; }); @@ -135,12 +92,11 @@ describe('postSurveyTelemetryCredentialAttachment', () => { const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const mockError = new Error('A test error'); + const upsertSurveyTelemetryCredentialAttachmentStub = sinon .stub(AttachmentService.prototype, 'upsertSurveyTelemetryCredentialAttachment') - .resolves({ survey_telemetry_credential_attachment_id: 44, key: 'path/to/file/test.keyx' }); - - const mockError = new Error('A test error'); - const uploadKeyXStub = sinon.stub(BctwKeyxService.prototype, 'uploadKeyX').rejects(mockError); + .rejects(mockError); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); @@ -168,7 +124,6 @@ describe('postSurveyTelemetryCredentialAttachment', () => { expect((actualError as HTTPError).message).to.equal(mockError.message); expect(upsertSurveyTelemetryCredentialAttachmentStub).to.have.been.calledOnce; - expect(uploadKeyXStub).to.have.been.calledOnce; } }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.ts index 4c93e7440b..b4e66ade26 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/telemetry/index.ts @@ -1,14 +1,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { TELEMETRY_CREDENTIAL_ATTACHMENT_TYPE 
} from '../../../../../../../constants/attachments'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../database/db'; import { HTTP400 } from '../../../../../../../errors/http-error'; import { fileSchema } from '../../../../../../../openapi/schemas/file'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; import { AttachmentService } from '../../../../../../../services/attachment-service'; -import { BctwKeyxService } from '../../../../../../../services/bctw-service/bctw-keyx-service'; -import { getBctwUser } from '../../../../../../../services/bctw-service/bctw-service'; import { uploadFileToS3 } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; import { isValidTelementryCredentialFile } from '../../../../../../../utils/media/media-utils'; @@ -157,12 +154,6 @@ export function postSurveyTelemetryCredentialAttachment(): RequestHandler { isTelemetryCredentialFile.type ); - // Upload telemetry credential file content to BCTW (for supported file types) - if (isTelemetryCredentialFile.type === TELEMETRY_CREDENTIAL_ATTACHMENT_TYPE.KEYX) { - const bctwKeyxService = new BctwKeyxService(getBctwUser(req)); - await bctwKeyxService.uploadKeyX(rawMediaFile); - } - // Upload telemetry credential file to SIMS S3 Storage const metadata = { filename: rawMediaFile.originalname, diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.test.ts index 924fc3abd5..b6335d5e90 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.test.ts @@ -2,13 +2,7 @@ import { expect } from 'chai'; import 
sinon from 'sinon'; import { createDeployment } from '.'; import * as db from '../../../../../../../../database/db'; -import { - BctwDeploymentRecord, - BctwDeploymentService -} from '../../../../../../../../services/bctw-service/bctw-deployment-service'; -import { BctwService } from '../../../../../../../../services/bctw-service/bctw-service'; -import { CritterbaseService, ICapture } from '../../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from '../../../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../__mocks__/db'; describe('createDeployment', () => { @@ -17,67 +11,38 @@ describe('createDeployment', () => { }); it('creates a new deployment', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockCapture: ICapture = { - capture_id: '111', - critter_id: '222', - capture_method_id: null, - capture_location_id: '333', - release_location_id: null, - capture_date: '2021-01-01', - capture_time: '12:00:00', - release_date: null, - release_time: null, - capture_comment: null, - release_comment: null - }; - - const mockDeployment: BctwDeploymentRecord = { - assignment_id: '111', - collar_id: '222', - critter_id: '333', - created_at: '2021-01-01', - created_by_user_id: '444', - updated_at: '2021-01-01', - updated_by_user_id: '555', - valid_from: '2021-01-01', - valid_to: '2021-01-01', - attachment_start: '2021-01-01', - attachment_end: '2021-01-01', - deployment_id: '666', - device_id: 777 - }; - - const getCodeStub = 
sinon.stub(BctwService.prototype, 'getCode').resolves([ - { - code_header_title: 'device_make', - code_header_name: 'Device Make', - id: 1, - code: 'device_make_code', - description: '', - long_description: '' - } - ]); - const insertDeploymentStub = sinon.stub(DeploymentService.prototype, 'insertDeployment').resolves(); - const createDeploymentStub = sinon - .stub(BctwDeploymentService.prototype, 'createDeployment') - .resolves(mockDeployment); - const getCaptureByIdStub = sinon.stub(CritterbaseService.prototype, 'getCaptureById').resolves(mockCapture); + sinon.stub(TelemetryDeploymentService.prototype, 'createDeployment').resolves(); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3' + }; + mockReq.body = { + device_id: 4, + frequency: 100, + frequency_unit_id: 1, + attachmentStartDard: '2021-01-01', + attachmentStartTime: '00:00', + attachmentEndDate: '2021-01-02', + attachmentEndTime: '00:00', + critterbaseStartCaptureId: '123-456-789', + critterbaseEndCaptureId: null, + critterbaseEndMortalityId: null + }; + const requestHandler = createDeployment(); await requestHandler(mockReq, mockRes, mockNext); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(getCodeStub).to.have.been.calledTwice; - expect(insertDeploymentStub).to.have.been.calledOnce; - expect(createDeploymentStub).to.have.been.calledOnce; - expect(getCaptureByIdStub).to.have.been.calledOnce; - expect(mockRes.status).to.have.been.calledWith(201); + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; }); it('catches and re-throws errors', async () => { @@ -85,7 +50,9 @@ describe('createDeployment', () => { const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); const mockError = new Error('a test error'); - const insertDeploymentStub = 
sinon.stub(DeploymentService.prototype, 'insertDeployment').rejects(mockError); + const insertDeploymentStub = sinon + .stub(TelemetryDeploymentService.prototype, 'createDeployment') + .rejects(mockError); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.ts index 4e909336c5..3ae4b72625 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/index.ts @@ -1,15 +1,11 @@ -import dayjs from 'dayjs'; import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { v4 } from 'uuid'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; -import { BctwDeploymentService } from '../../../../../../../../services/bctw-service/bctw-deployment-service'; -import { BctwService, getBctwUser } from '../../../../../../../../services/bctw-service/bctw-service'; -import { CritterbaseService } from '../../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from '../../../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getLogger } from '../../../../../../../../utils/logger'; +import { numberOrNull } from '../../../../../../../../utils/string-utils'; const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments'); @@ -33,22 +29,31 @@ export const POST: Operation = [ ]; POST.apiDoc = { - 
description: - 'Creates a deployment in SIMS and BCTW. Upserts a collar in BCTW and inserts a new deployment of the resulting collar_id.', - tags: ['deployment', 'bctw', 'critterbase'], + description: 'Creates a new deployment.', + tags: ['deployment'], security: [ { Bearer: [] } ], parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, { in: 'path', name: 'surveyId', schema: { type: 'integer', minimum: 1 - } + }, + required: true }, { in: 'path', @@ -56,7 +61,8 @@ POST.apiDoc = { schema: { type: 'integer', minimum: 1 - } + }, + required: true } ], requestBody: { @@ -71,63 +77,75 @@ POST.apiDoc = { required: [ 'device_id', 'frequency', - 'frequency_unit', - 'device_make', - 'device_model', + 'frequency_unit_id', + 'attachment_start_date', + 'attachment_start_time', + 'attachment_end_date', + 'attachment_end_time', 'critterbase_start_capture_id', 'critterbase_end_capture_id', - 'critterbase_end_mortality_id', - 'attachment_end_date', - 'attachment_end_time' + 'critterbase_end_mortality_id' ], properties: { device_id: { type: 'integer', + description: 'The ID of the device.', minimum: 1 }, frequency: { type: 'number', + description: + 'The frequency of the device. Property "frequency_unit_id" must also be provided if this is provided.', nullable: true }, - frequency_unit: { - type: 'number', - nullable: true, - description: 'The ID of a BCTW frequency code.' + frequency_unit_id: { + type: 'integer', + description: + 'The ID of a frequency unit code. Property "frequency" must also be provided if this is provided.', + minimum: 1, + nullable: true }, - device_make: { - type: 'number', - description: 'The ID of a BCTW device make code.' 
+ attachment_start_date: { + type: 'string', + description: 'Start date of the deployment (without time component).', + example: '2021-01-01' }, - device_model: { + attachment_start_time: { type: 'string', + description: 'Start time of the deployment.', + example: '12:00:00', nullable: true }, - critterbase_start_capture_id: { + attachment_end_date: { type: 'string', - description: 'Critterbase capture record when the deployment started', - format: 'uuid', + description: 'End date of the deployment (without time component).', + example: '2021-01-01', nullable: true }, - critterbase_end_capture_id: { + attachment_end_time: { type: 'string', - description: 'Critterbase capture record when the deployment ended', - format: 'uuid', + description: 'End time of the deployment.', + example: '12:00:00', nullable: true }, - critterbase_end_mortality_id: { + critterbase_start_capture_id: { type: 'string', - description: 'Critterbase mortality record when the deployment ended', - format: 'uuid', - nullable: true + description: + 'Critterbase capture event. The capture event during which the device was attached to the animal.', + format: 'uuid' }, - attachment_end_date: { + critterbase_end_capture_id: { type: 'string', - description: 'End date of the deployment, without time.', + description: + 'Critterbase capture event. The capture event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', nullable: true }, - attachment_end_time: { + critterbase_end_mortality_id: { type: 'string', - description: 'End time of the deployment.', + description: + 'Critterbase mortality event. The mortality event during which the device was removed from the animal. 
Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', nullable: true } } @@ -136,24 +154,8 @@ POST.apiDoc = { } }, responses: { - 201: { - description: 'Responds with the created BCTW deployment uuid.', - content: { - 'application/json': { - schema: { - title: 'Deployment response object', - type: 'object', - additionalProperties: false, - properties: { - deploymentId: { - type: 'string', - format: 'uuid', - description: 'The generated deployment Id, indicating that the deployment was succesfully created.' - } - } - } - } - } + 200: { + description: 'Deployment created OK.' }, 400: { $ref: '#/components/responses/400' @@ -173,80 +175,55 @@ POST.apiDoc = { } }; +/** + * Creates a new deployment. + * + * @export + * @return {*} {RequestHandler} + */ export function createDeployment(): RequestHandler { return async (req, res) => { - const surveyCritterId = Number(req.params.critterId); - - // Create deployment Id for joining SIMS and BCTW deployment information - const newDeploymentId = v4(); - - const { - device_id, - frequency, - frequency_unit, - device_make, - device_model, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id, - attachment_end_date, - attachment_end_time - } = req.body; + const surveyId = Number(req.params.surveyId); + const critterId = Number(req.params.critterId); + + const deviceId = Number(req.body.device_id); + const frequency = numberOrNull(req.body.frequency); + const frequencyUnitId = numberOrNull(req.body.frequency_unit_id); + const attachmentStartDate = req.body.attachment_start_date; + const attachmentStartTime = req.body.attachment_start_time; + const attachmentEndDate = req.body.attachment_end_date; + const attachmentEndTime = req.body.attachment_end_time; + const critterbaseStartCaptureId = req.body.critterbase_start_capture_id; + const critterbaseEndCaptureId = req.body.critterbase_end_capture_id; + const critterbaseEndMortalityId = 
req.body.critterbase_end_mortality_id; const connection = getDBConnection(req.keycloak_token); try { await connection.open(); - const user = getBctwUser(req); - - const bctwService = new BctwService(user); - const bctwDeploymentService = new BctwDeploymentService(user); - const deploymentService = new DeploymentService(connection); - const critterbaseService = new CritterbaseService(user); - - await deploymentService.insertDeployment({ - critter_id: surveyCritterId, - bctw_deployment_id: newDeploymentId, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id - }); - - // Retrieve the capture to get the capture date for BCTW - const critterbaseCritter = await critterbaseService.getCaptureById(critterbase_start_capture_id); - - // Create attachment end date from provided end date (if not null) and end time (if not null). - const attachmentEnd = attachment_end_date - ? attachment_end_time - ? dayjs(`${attachment_end_date} ${attachment_end_time}`).toISOString() - : dayjs(`${attachment_end_date}`).toISOString() - : null; + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - // Get BCTW code values - const [deviceMakeCodes, frequencyUnitCodes] = await Promise.all([ - bctwService.getCode('device_make'), - bctwService.getCode('frequency_unit') - ]); - // The BCTW API expects the device make and frequency unit as codes, not IDs. - const device_make_code = deviceMakeCodes.find((code) => code.id === device_make)?.code; - const frequency_unit_code = frequencyUnitCodes.find((code) => code.id === frequency_unit)?.code; + // TODO - Do we need to verify that the incoming 'critterbase...Id' values exist and are associated to the critter_id?? 
- const deployment = await bctwDeploymentService.createDeployment({ - deployment_id: newDeploymentId, - device_id: device_id, - critter_id: critterbaseCritter.critter_id, + await telemetryDeploymentService.createDeployment({ + survey_id: surveyId, + critter_id: critterId, + device_id: deviceId, frequency: frequency, - frequency_unit: frequency_unit_code, - device_make: device_make_code, - device_model: device_model, - attachment_start: critterbaseCritter.capture_date, - attachment_end: attachmentEnd // TODO: ADD SEPARATE DATE AND TIME TO BCTW + frequency_unit_id: frequencyUnitId, + attachment_start_date: attachmentStartDate, + attachment_start_time: attachmentStartTime, + attachment_end_date: attachmentEndDate, + attachment_end_time: attachmentEndTime, + critterbase_start_capture_id: critterbaseStartCaptureId, + critterbase_end_capture_id: critterbaseEndCaptureId, + critterbase_end_mortality_id: critterbaseEndMortalityId }); await connection.commit(); - return res.status(201).json({ deploymentId: deployment.deployment_id }); + return res.status(200).send(); } catch (error) { defaultLog.error({ label: 'createDeployment', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.test.ts deleted file mode 100644 index 7ec000876d..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.test.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { patchDeployment } from '.'; -import * as db from '../../../../../../../../../database/db'; -import { BctwDeploymentService } from '../../../../../../../../../services/bctw-service/bctw-deployment-service'; -import { CritterbaseService, ICapture } from 
'../../../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../../../services/deployment-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../../__mocks__/db'; - -describe('patchDeployment', () => { - afterEach(() => { - sinon.restore(); - }); - - it('updates an existing deployment', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockCapture: ICapture = { - capture_id: '111', - critter_id: '222', - capture_method_id: null, - capture_location_id: '333', - release_location_id: null, - capture_date: '2021-01-01', - capture_time: '12:00:00', - release_date: null, - release_time: null, - capture_comment: null, - release_comment: null - }; - - const updateDeploymentStub = sinon.stub(DeploymentService.prototype, 'updateDeployment').resolves(); - const updateBctwDeploymentStub = sinon.stub(BctwDeploymentService.prototype, 'updateDeployment'); - const getCaptureByIdStub = sinon.stub(CritterbaseService.prototype, 'getCaptureById').resolves(mockCapture); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.body = { - deployment_id: '111', - bctw_deployment_id: '222', - critterbase_start_capture_id: '333', - critterbase_end_capture_id: '444', - critterbase_end_mortality_id: '555', - attachment_end_date: '2021-01-01', - attachment_end_time: '12:00:00' - }; - - const requestHandler = patchDeployment(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(updateDeploymentStub).to.have.been.calledOnce; - expect(updateBctwDeploymentStub).to.have.been.calledOnce; - expect(getCaptureByIdStub).to.have.been.calledOnce; - expect(mockRes.status).to.have.been.calledWith(200); - }); - - it('catches and re-throws errors', async () => { - const mockDBConnection = 
getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockError = new Error('a test error'); - const updateDeploymentStub = sinon.stub(DeploymentService.prototype, 'updateDeployment').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - const requestHandler = patchDeployment(); - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(actualError).to.equal(mockError); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(updateDeploymentStub).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.ts deleted file mode 100644 index e1e908f20f..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{deploymentId}/index.ts +++ /dev/null @@ -1,245 +0,0 @@ -import dayjs from 'dayjs'; -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../../../constants/roles'; -import { getDBConnection } from '../../../../../../../../../database/db'; -import { authorizeRequestHandler } from '../../../../../../../../../request-handlers/security/authorization'; -import { BctwDeploymentService } from '../../../../../../../../../services/bctw-service/bctw-deployment-service'; -import { CritterbaseService, ICritterbaseUser } from '../../../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../../../services/deployment-service'; -import { getLogger } from '../../../../../../../../../utils/logger'; - -const defaultLog = getLogger( - 
'paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/deployments/{bctwDeploymentId}' -); - -export const PATCH: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], - surveyId: Number(req.params.surveyId), - discriminator: 'ProjectPermission' - }, - { - validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], - discriminator: 'SystemRole' - } - ] - }; - }), - patchDeployment() -]; - -PATCH.apiDoc = { - description: 'Updates information about the start and end of a deployment.', - tags: ['critterbase'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - }, - { - in: 'path', - name: 'surveyId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - }, - { - in: 'path', - name: 'critterId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - }, - { - in: 'path', - name: 'deploymentId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - } - ], - requestBody: { - description: 'Specifies a deployment id and the new timerange to update it with.', - required: true, - content: { - 'application/json': { - schema: { - title: 'Deploy device request object', - type: 'object', - additionalProperties: false, - required: [ - 'bctw_deployment_id', - 'critterbase_start_capture_id', - 'critterbase_end_capture_id', - 'critterbase_end_mortality_id', - 'attachment_end_date', - 'attachment_end_time' - ], - properties: { - deployment_id: { - type: 'integer', - description: 'Id of the deployment in SIMS', - minimum: 1 - }, - bctw_deployment_id: { - type: 'string', - description: 'Id of the deployment in BCTW', - format: 'uuid' - }, - critterbase_start_capture_id: { - type: 'string', - description: 'Critterbase capture record for when the deployment start', - format: 'uuid', - nullable: true - }, - 
critterbase_end_capture_id: { - type: 'string', - description: 'Critterbase capture record for when the deployment ended', - format: 'uuid', - nullable: true - }, - critterbase_end_mortality_id: { - type: 'string', - description: 'Critterbase mortality record for when the deployment ended', - format: 'uuid', - nullable: true - }, - attachment_end_date: { - type: 'string', - description: 'End date of the deployment, without time.', - example: '2021-01-01', - pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}$', - nullable: true - }, - attachment_end_time: { - type: 'string', - description: 'End time of the deployment.', - example: '12:00:00', - pattern: '^[0-9]{2}:[0-9]{2}:[0-9]{2}$', - nullable: true - } - } - } - } - } - }, - responses: { - 200: { - description: 'Responds with count of rows created or updated in SIMS DB Deployments.', - content: { - 'application/json': { - schema: { - title: 'Deployment response object', - type: 'object', - additionalProperties: false, - properties: { - message: { - type: 'string' - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function patchDeployment(): RequestHandler { - return async (req, res) => { - const critterId = Number(req.params.critterId); - const deploymentId = Number(req.params.deploymentId); - - const connection = getDBConnection(req.keycloak_token); - - const { - bctw_deployment_id, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id, - attachment_end_date, - attachment_end_time - } = req.body; - - try { - await connection.open(); - - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; - - const bctwDeploymentService = new BctwDeploymentService(user); - const 
deploymentService = new DeploymentService(connection); - const critterbaseService = new CritterbaseService(user); - - await deploymentService.updateDeployment({ - deployment_id: deploymentId, - critter_id: critterId, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id - }); - - const capture = await critterbaseService.getCaptureById(critterbase_start_capture_id); - - // Create attachment end date from provided end date (if not null) and end time (if not null). - const attachmentEnd = attachment_end_date - ? attachment_end_time - ? dayjs(`${attachment_end_date} ${attachment_end_time}`).toISOString() - : dayjs(`${attachment_end_date}`).toISOString() - : null; - - // Update the deployment in BCTW, which works by soft deleting and inserting a new deployment record - await bctwDeploymentService.updateDeployment({ - deployment_id: bctw_deployment_id, - attachment_start: capture.capture_date, - attachment_end: attachmentEnd // TODO: ADD SEPARATE DATE AND TIME TO BCTW - }); - - await connection.commit(); - - return res.status(200).send(); - } catch (error) { - defaultLog.error({ label: 'patchDeployment', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.test.ts index f25de6819c..ebb9cd08c2 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.test.ts @@ -1,8 +1,9 @@ import { expect } from 'chai'; import sinon from 'sinon'; -import { updateSurveyCritter } from '.'; +import { getSurveyCritter, updateSurveyCritter } from '.'; import * as db from '../../../../../../../database/db'; import { HTTPError } from '../../../../../../../errors/http-error'; +import { 
CritterAttachmentService } from '../../../../../../../services/critter-attachment-service'; import { CritterbaseService } from '../../../../../../../services/critterbase-service'; import { SurveyCritterService } from '../../../../../../../services/survey-critter-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; @@ -96,3 +97,334 @@ describe('updateSurveyCritter', () => { } }); }); + +describe('getSurveyCritter', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns a critter from survey', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSimsCritter = { + survey_id: 2, + critter_id: 3, + critterbase_critter_id: '333-333-333' + }; + + const mockAttachments = { + captureAttachments: [ + { + critter_capture_attachment_id: 1, + uuid: '111-111-111', + critter_id: 3, + critterbase_capture_id: '222-222-222', + file_type: 'Other', + file_name: 'moose_picture.jpg', + file_size: 100, + title: 'Moose 1', + description: 'Picture of a moose', + key: 'project/1/survey/1/critter/3/attachment/1' + } + ] + }; + + const mockCritterbaseCritter = { + critter_id: '333-333-333' + }; + + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').resolves(mockSimsCritter); + + const findAllCritterAttachmentsStub = sinon + .stub(CritterAttachmentService.prototype, 'findAllCritterAttachments') + .resolves(mockAttachments); + + const getCritterStub = sinon.stub(CritterbaseService.prototype, 'getCritter').resolves(mockCritterbaseCritter); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' + }; + mockReq.query = { + expand: ['attachments'] + }; + + const requestHandler = getSurveyCritter(); + + await requestHandler(mockReq, mockRes, mockNext); + + 
expect(getCritterByIdStub).to.have.been.calledOnce; + expect(findAllCritterAttachmentsStub).to.have.been.calledOnce; + expect(getCritterStub).to.have.been.calledOnce; + + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockRes.json).to.have.been.calledWith({ + attachments: mockAttachments, + ...mockCritterbaseCritter, + ...mockSimsCritter + }); + + expect(mockDBConnection.commit).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + }); + + it('returns a critter without expanded properties', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSimsCritter = { + survey_id: 2, + critter_id: 3, + critterbase_critter_id: '333-333-333' + }; + + const mockAttachments = { + captureAttachments: [ + { + critter_capture_attachment_id: 1, + uuid: '111-111-111', + critter_id: 3, + critterbase_capture_id: '222-222-222', + file_type: 'Other', + file_name: 'moose_picture.jpg', + file_size: 100, + title: 'Moose 1', + description: 'Picture of a moose', + key: 'project/1/survey/1/critter/3/attachment/1' + } + ] + }; + + const mockCritterbaseCritter = { + critter_id: '333-333-333' + }; + + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').resolves(mockSimsCritter); + + const findAllCritterAttachmentsStub = sinon + .stub(CritterAttachmentService.prototype, 'findAllCritterAttachments') + .resolves(mockAttachments); + + const getCritterStub = sinon.stub(CritterbaseService.prototype, 'getCritter').resolves(mockCritterbaseCritter); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' + }; + + const requestHandler = getSurveyCritter(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(getCritterByIdStub).to.have.been.calledOnce; + 
expect(findAllCritterAttachmentsStub).not.to.have.been.called; // attachments not fetched + expect(getCritterStub).to.have.been.calledOnce; + + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockRes.json).to.have.been.calledWith({ + ...mockCritterbaseCritter, + ...mockSimsCritter + }); + + expect(mockDBConnection.commit).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + }); + + it('throws if a sims critter record is not found', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + // Fail to find sims critter record + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').resolves(undefined); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' + }; + mockReq.query = { + expand: ['attachments'] + }; + + const requestHandler = getSurveyCritter(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal('Critter with id 3 not found.'); + expect((actualError as HTTPError).status).to.equal(400); + + expect(getCritterByIdStub).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + } + }); + + it('throws if a critterbase critter record is not found', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSimsCritter = { + survey_id: 2, + critter_id: 3, + critterbase_critter_id: '333-333-333' + }; + + const mockAttachments = { + captureAttachments: [ + { + critter_capture_attachment_id: 1, + uuid: '111-111-111', + critter_id: 3, + critterbase_capture_id: 
'222-222-222', + file_type: 'Other', + file_name: 'moose_picture.jpg', + file_size: 100, + title: 'Moose 1', + description: 'Picture of a moose', + key: 'project/1/survey/1/critter/3/attachment/1' + } + ] + }; + + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').resolves(mockSimsCritter); + + const findAllCritterAttachmentsStub = sinon + .stub(CritterAttachmentService.prototype, 'findAllCritterAttachments') + .resolves(mockAttachments); + + // Fail to find external critterbase record + const getCritterStub = sinon.stub(CritterbaseService.prototype, 'getCritter').resolves(undefined); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' + }; + mockReq.query = { + expand: ['attachments'] + }; + + const requestHandler = getSurveyCritter(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal('Critterbase critter with id 333-333-333 not found.'); + expect((actualError as HTTPError).status).to.equal(400); + + expect(getCritterByIdStub).to.have.been.calledOnce; + expect(findAllCritterAttachmentsStub).to.have.been.calledOnce; + expect(getCritterStub).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + } + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('test error'); + + // Fail to find sims critter record + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' 
+ }; + mockReq.query = { + expand: ['attachments'] + }; + + const requestHandler = getSurveyCritter(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal('test error'); + + expect(getCritterByIdStub).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + } + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSimsCritter = { + survey_id: 2, + critter_id: 3, + critterbase_critter_id: '333-333-333' + }; + + const mockAttachments = { + captureAttachments: [ + { + critter_capture_attachment_id: 1, + uuid: '111-111-111', + critter_id: 3, + critterbase_capture_id: '222-222-222', + file_type: 'Other', + file_name: 'moose_picture.jpg', + file_size: 100, + title: 'Moose 1', + description: 'Picture of a moose', + key: 'project/1/survey/1/critter/3/attachment/1' + } + ] + }; + + const mockError = new Error('test error'); + + const getCritterByIdStub = sinon.stub(SurveyCritterService.prototype, 'getCritterById').resolves(mockSimsCritter); + + const findAllCritterAttachmentsStub = sinon + .stub(CritterAttachmentService.prototype, 'findAllCritterAttachments') + .resolves(mockAttachments); + + // Fail to find external critterbase record + const getCritterStub = sinon.stub(CritterbaseService.prototype, 'getCritter').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.params = { + projectId: '1', + surveyId: '2', + critterId: '3 ' + }; + + const requestHandler = getSurveyCritter(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal('test error'); + + 
expect(getCritterByIdStub).to.have.been.calledOnce; + expect(findAllCritterAttachmentsStub).not.to.have.been.calledOnce; + expect(getCritterStub).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.have.been.called; + expect(mockDBConnection.release).to.have.been.called; + } + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.ts index 80e0ca92c7..21037369a6 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/index.ts @@ -2,12 +2,15 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../database/db'; -import { HTTPError, HTTPErrorType } from '../../../../../../../errors/http-error'; +import { HTTP400, HTTPError, HTTPErrorType } from '../../../../../../../errors/http-error'; import { bulkUpdateResponse, critterBulkRequestObject } from '../../../../../../../openapi/schemas/critter'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../../../../../services/bctw-service/bctw-service'; import { CritterAttachmentService } from '../../../../../../../services/critter-attachment-service'; -import { CritterbaseService, ICritterbaseUser } from '../../../../../../../services/critterbase-service'; +import { + CritterbaseService, + getCritterbaseUser, + ICritterbaseUser +} from '../../../../../../../services/critterbase-service'; import { SurveyCritterService } from '../../../../../../../services/survey-critter-service'; import { getLogger } from '../../../../../../../utils/logger'; @@ -95,7 +98,7 @@ export function updateSurveyCritter(): 
RequestHandler { const connection = getDBConnection(req.keycloak_token); try { await connection.open(); - const user = getBctwUser(req); + const user = getCritterbaseUser(req); if (!critterbaseCritterId) { throw new HTTPError(HTTPErrorType.BAD_REQUEST, 400, 'No external critter ID was found.'); @@ -149,23 +152,33 @@ export const GET: Operation = [ ] }; }), - getCrittersFromSurvey() + getSurveyCritter() ]; GET.apiDoc = { description: 'Gets a specific critter by its integer Critter Id', - tags: ['critterbase'], + tags: ['animal', 'critterbase'], security: [ { Bearer: [] } ], parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, { in: 'path', name: 'surveyId', schema: { - type: 'integer' + type: 'integer', + minimum: 1 }, required: true }, @@ -173,9 +186,23 @@ GET.apiDoc = { in: 'path', name: 'critterId', schema: { - type: 'integer' + type: 'integer', + minimum: 1 }, required: true + }, + { + in: 'query', + name: 'expand', + description: 'List of related resources to include in the response.', + schema: { + type: 'array', + items: { + type: 'string', + enum: ['attachments'] + } + }, + required: false } ], responses: { @@ -183,7 +210,55 @@ GET.apiDoc = { description: 'Responds with a critter', content: { 'application/json': { - schema: { type: 'object' } + schema: { + type: 'object', + required: ['critter_id', 'critterbase_critter_id', 'survey_id'], + additionalProperties: true, // Allow additional properties while critterbase portion of response is not defined + properties: { + critterbase_critter_id: { + type: 'string', + format: 'uuid' + }, + critter_id: { + type: 'integer', + minimum: 1 + }, + survey_id: { + type: 'integer', + minimum: 1 + }, + attachments: { + type: 'object', + description: + 'Attachments associated with the critter. 
Only included if requested via the expand query parameter.', + required: ['captureAttachments'], + properties: { + capture_attachments: { + type: 'array', + items: { + type: 'object', + required: ['attachment_id', 'attachment_type', 'attachment_url'], + additionalProperties: false, + properties: { + attachment_id: { + type: 'integer', + minimum: 1 + }, + attachment_type: { + type: 'string', + enum: ['photo', 'video'] + }, + attachment_url: { + type: 'string', + format: 'uri' + } + } + } + } + } + } + } + } } } }, @@ -205,10 +280,11 @@ GET.apiDoc = { } }; -export function getCrittersFromSurvey(): RequestHandler { +export function getSurveyCritter(): RequestHandler { return async (req, res) => { const surveyId = Number(req.params.surveyId); const critterId = Number(req.params.critterId); + const expand = (req.query.expand as string[]) ?? []; const connection = getDBConnection(req.keycloak_token); @@ -221,41 +297,47 @@ export function getCrittersFromSurvey(): RequestHandler { }; const surveyService = new SurveyCritterService(connection); - const critterbaseService = new CritterbaseService(user); const critterAttachmentService = new CritterAttachmentService(connection); + const critterbaseService = new CritterbaseService(user); const surveyCritter = await surveyService.getCritterById(surveyId, critterId); if (!surveyCritter) { - return res.status(404).json({ error: `Critter with id ${critterId} not found.` }); + throw new HTTP400(`Critter with id ${critterId} not found.`); } + const getAttachmentsPromise = expand.includes('attachments') + ? 
critterAttachmentService.findAllCritterAttachments(surveyCritter.critter_id).then((response) => { + return { + attachments: { + captureAttachments: response.captureAttachments + // TODO: add mortality attachments + } + }; + }) + : Promise.resolve({}); + // Get the attachments from SIMS table and the Critter from critterbase - const [atttachments, critterbaseCritter] = await Promise.all([ - critterAttachmentService.findAllCritterAttachments(surveyCritter.critter_id), + const [attachments, critterbaseCritter] = await Promise.all([ + getAttachmentsPromise, critterbaseService.getCritter(surveyCritter.critterbase_critter_id) ]); await connection.commit(); if (!critterbaseCritter || critterbaseCritter.length === 0) { - return res.status(404).json({ error: `Critter ${surveyCritter.critterbase_critter_id} not found.` }); + throw new HTTP400(`Critterbase critter with id ${surveyCritter.critterbase_critter_id} not found.`); } - const critterMapped = { - ...surveyCritter, + const response = { + ...attachments, ...critterbaseCritter, - critterbase_critter_id: surveyCritter.critterbase_critter_id, - critter_id: surveyCritter.critter_id, - attachments: { - capture_attachments: atttachments.captureAttachments - // TODO: add mortality attachments - } + ...surveyCritter }; - return res.status(200).json(critterMapped); + return res.status(200).json(response); } catch (error) { - defaultLog.error({ label: 'getCritter', message: 'error', error }); + defaultLog.error({ label: 'getSurveyCritter', message: 'error', error }); await connection.rollback(); throw error; } finally { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.test.ts deleted file mode 100644 index 6c7b80beb6..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.test.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { expect } from 'chai'; 
-import sinon from 'sinon'; -import * as db from '../../../../../../../database/db'; -import { SurveyDeployment } from '../../../../../../../models/survey-deployment'; -import { BctwTelemetryService, IAllTelemetry } from '../../../../../../../services/bctw-service/bctw-telemetry-service'; -import { DeploymentService } from '../../../../../../../services/deployment-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; -import { getCritterTelemetry } from './telemetry'; - -describe('getCritterTelemetry', () => { - afterEach(() => { - sinon.restore(); - }); - - it('fetches telemetry object', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockSurveyDeployment: SurveyDeployment = { - deployment_id: 1, - critter_id: 123, - critterbase_critter_id: 'critter-001', - bctw_deployment_id: '111', - critterbase_start_capture_id: '222', - critterbase_end_capture_id: '333', - critterbase_end_mortality_id: '444' - }; - - const mockTelemetry: IAllTelemetry[] = [ - { - id: '123e4567-e89b-12d3-a456-426614174111', - deployment_id: '123e4567-e89b-12d3-a456-426614174222', - latitude: 37.7749, - longitude: -122.4194, - acquisition_date: '2023-10-01T12:00:00Z', - telemetry_type: 'ATS', - telemetry_id: '123e4567-e89b-12d3-a456-426614174111', - telemetry_manual_id: null - }, - { - id: '123e4567-e89b-12d3-a456-426614174333', - deployment_id: '123e4567-e89b-12d3-a456-426614174444', - latitude: 37.775, - longitude: -122.4195, - acquisition_date: '2023-10-01T12:05:00Z', - telemetry_type: 'ATS', - telemetry_id: null, - telemetry_manual_id: '123e4567-e89b-12d3-a456-426614174333' - }, - { - id: '123e4567-e89b-12d3-a456-426614174555', - deployment_id: '123e4567-e89b-12d3-a456-426614174666', - latitude: 37.7751, - longitude: -122.4196, - acquisition_date: '2023-10-01T12:10:00Z', - telemetry_type: 'MANUAL', 
- telemetry_id: null, - telemetry_manual_id: '123e4567-e89b-12d3-a456-426614174555' - } - ]; - - const getDeploymentForCritterIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentForCritterId') - .resolves(mockSurveyDeployment); - - const getAllTelemetryByDeploymentIdsStub = sinon - .stub(BctwTelemetryService.prototype, 'getAllTelemetryByDeploymentIds') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params.critterId = '1'; - - const requestHandler = getCritterTelemetry(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(getDeploymentForCritterIdStub).to.have.been.calledOnce; - expect(getAllTelemetryByDeploymentIdsStub).to.have.been.calledOnce; - }); - - it('catches and re-throws error', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockError = new Error('a test error'); - const getDeploymentForCritterIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentForCritterId') - .rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - const requestHandler = getCritterTelemetry(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(actualError).to.eql(mockError); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(getDeploymentForCritterIdStub).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.ts index 326264c6ef..bae5f98ce2 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.ts +++ 
b/api/src/paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry.ts @@ -2,11 +2,10 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../database/db'; -import { AllTelemetrySchema } from '../../../../../../../openapi/schemas/telemetry'; +import { paginationRequestQueryParamSchema } from '../../../../../../../openapi/schemas/pagination'; +import { TelemetrySchema } from '../../../../../../../openapi/schemas/telemetry'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { BctwTelemetryService } from '../../../../../../../services/bctw-service/bctw-telemetry-service'; -import { ICritterbaseUser } from '../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../services/deployment-service'; +import { TelemetryVendorService } from '../../../../../../../services/telemetry-services/telemetry-vendor-service'; import { getLogger } from '../../../../../../../utils/logger'; const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/critters/{critterId}/telemetry'); @@ -31,7 +30,7 @@ export const GET: Operation = [ GET.apiDoc = { description: 'Get telemetry points for a specific critter.', - tags: ['bctw'], + tags: ['telemetry'], security: [ { Bearer: [] @@ -57,7 +56,6 @@ GET.apiDoc = { { in: 'query', name: 'startDate', - required: true, schema: { type: 'string' } @@ -65,20 +63,27 @@ GET.apiDoc = { { in: 'query', name: 'endDate', - required: true, schema: { type: 'string' } - } + }, + ...paginationRequestQueryParamSchema ], responses: { 200: { - description: 'Responds with count of rows created in SIMS DB Deployments.', + description: 'Responds with telemetry points for a specific critter.', content: { 'application/json': { schema: { - type: 'array', - items: 
AllTelemetrySchema + type: 'object', + required: ['telemetry'], + additionalProperties: false, + properties: { + telemetry: { + type: 'array', + items: TelemetrySchema + } + } } } } @@ -105,32 +110,25 @@ export function getCritterTelemetry(): RequestHandler { return async (req, res) => { const critterId = Number(req.params.critterId); const surveyId = Number(req.params.surveyId); + const startDate = req.query.startDate && String(req.query.startDate); + const endDate = req.query.endDate && String(req.query.endDate); const connection = getDBConnection(req.keycloak_token); try { await connection.open(); - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; - - // TODO: Telemetry data should only ever be fetched by deployment Ids. To get telemetry for an animal, first find the - // relevant deployment Id, then fetch data for that deployment Id. - const deploymentService = new DeploymentService(connection); - const bctwTelemetryService = new BctwTelemetryService(user); + const telemetryVendorService = new TelemetryVendorService(connection); - const { bctw_deployment_id } = await deploymentService.getDeploymentForCritterId(surveyId, critterId); - - // const startDate = new Date(String(req.query.startDate)); - // const endDate = new Date(String(req.query.endDate)); - - // TODO: Add start and end date filters received in the SIMS request to the BCTW request - const points = await bctwTelemetryService.getAllTelemetryByDeploymentIds([bctw_deployment_id]); + const telemetry = await telemetryVendorService.getTelemetryForCritter(surveyId, critterId, { + dateRange: { + startDate, + endDate + } + }); await connection.commit(); - return res.status(200).json(points); + return res.status(200).json({ telemetry: telemetry }); } catch (error) { defaultLog.error({ label: 'telemetry', message: 'error', error }); await connection.rollback(); diff --git 
a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.test.ts index 7b3da2a723..35cad07d04 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.test.ts @@ -3,9 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../../database/db'; -import { BctwDeploymentService } from '../../../../../../services/bctw-service/bctw-deployment-service'; -import { DeploymentService } from '../../../../../../services/deployment-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../.././../../__mocks__/db'; +import { TelemetryDeploymentService } from '../../../../../../services/telemetry-services/telemetry-deployment-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../__mocks__/db'; import { deleteDeploymentsInSurvey } from './delete'; chai.use(sinonChai); @@ -15,9 +14,9 @@ describe('deleteDeploymentsInSurvey', () => { sinon.restore(); }); - it('should delete all provided deployment records from sims and bctw', async () => { - const dbConnectionObj = getMockDBConnection(); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + it('should delete all provided deployment records', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); @@ -29,21 +28,21 @@ describe('deleteDeploymentsInSurvey', () => { deployment_ids: [3, 4] }; - const mockDeleteSimsDeploymentResponse = { bctw_deployment_id: '123-456-789' }; - - sinon.stub(DeploymentService.prototype, 'deleteDeployment').resolves(mockDeleteSimsDeploymentResponse); - 
sinon.stub(BctwDeploymentService.prototype, 'deleteDeployment').resolves(); + sinon.stub(TelemetryDeploymentService.prototype, 'deleteDeployments').resolves(); const requestHandler = deleteDeploymentsInSurvey(); await requestHandler(mockReq, mockRes, mockNext); expect(mockRes.statusValue).to.equal(200); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; }); it('should catch and re-throw an error', async () => { - const dbConnectionObj = getMockDBConnection(); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); @@ -55,11 +54,9 @@ describe('deleteDeploymentsInSurvey', () => { deployment_ids: [3, 4] }; - const mockDeleteSimsDeploymentResponse = { bctw_deployment_id: '123-456-789' }; const mockError = new Error('test error'); - sinon.stub(DeploymentService.prototype, 'deleteDeployment').resolves(mockDeleteSimsDeploymentResponse); - sinon.stub(BctwDeploymentService.prototype, 'deleteDeployment').throws(mockError); + sinon.stub(TelemetryDeploymentService.prototype, 'deleteDeployments').rejects(mockError); const requestHandler = deleteDeploymentsInSurvey(); try { @@ -67,6 +64,9 @@ describe('deleteDeploymentsInSurvey', () => { expect.fail(); } catch (actualError) { expect(actualError).to.eql(mockError); + + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; } }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.ts index 0a5b50ca65..eecf823f4b 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.ts +++ 
b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/delete.ts @@ -3,9 +3,7 @@ import { Operation } from 'express-openapi'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; -import { BctwDeploymentService } from '../../../../../../services/bctw-service/bctw-deployment-service'; -import { ICritterbaseUser } from '../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from '../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getLogger } from '../../../../../../utils/logger'; const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/delete'); @@ -30,8 +28,8 @@ export const POST: Operation = [ ]; POST.apiDoc = { - description: 'Delete deployments from a survey.', - tags: ['deployment', 'bctw'], + description: 'Delete deployments.', + tags: ['deployment'], security: [ { Bearer: [] @@ -64,6 +62,8 @@ POST.apiDoc = { 'application/json': { schema: { type: 'object', + required: ['deployment_ids'], + additionalProperties: false, properties: { deployment_ids: { type: 'array', @@ -104,7 +104,7 @@ POST.apiDoc = { }; /** - * Delete deployments from a survey. + * Deletes deployments. 
* * @export * @return {*} {RequestHandler} @@ -119,20 +119,9 @@ export function deleteDeploymentsInSurvey(): RequestHandler { try { await connection.open(); - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - const deletePromises = deploymentIds.map(async (deploymentId) => { - const deploymentService = new DeploymentService(connection); - const { bctw_deployment_id } = await deploymentService.deleteDeployment(surveyId, deploymentId); - - const bctwDeploymentService = new BctwDeploymentService(user); - await bctwDeploymentService.deleteDeployment(bctw_deployment_id); - }); - - await Promise.all(deletePromises); + await telemetryDeploymentService.deleteDeployments(surveyId, deploymentIds); await connection.commit(); @@ -140,7 +129,6 @@ export function deleteDeploymentsInSurvey(): RequestHandler { } catch (error) { defaultLog.error({ label: 'deleteDeploymentsInSurvey', message: 'error', error }); await connection.rollback(); - throw error; } finally { connection.release(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.test.ts index a6ca4e7545..028a76bcdc 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.test.ts @@ -2,12 +2,7 @@ import { expect } from 'chai'; import sinon from 'sinon'; import { getDeploymentsInSurvey } from '.'; import * as db from '../../../../../../database/db'; -import { SurveyDeployment } from '../../../../../../models/survey-deployment'; -import { - BctwDeploymentRecordWithDeviceMeta, - BctwDeploymentService -} from '../../../../../../services/bctw-service/bctw-deployment-service'; -import { DeploymentService } from 
'../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from '../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../__mocks__/db'; describe('getDeploymentsInSurvey', () => { @@ -19,308 +14,59 @@ describe('getDeploymentsInSurvey', () => { const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockSIMSDeployments = [ + const mockDeployments = [ { - deployment_id: 3, + // deployment data + deployment_id: 1, + survey_id: 66, critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', + device_id: 3, + frequency: 150.0, + frequency_unit_id: 1, + device_key: 'lotek:12345', + attachment_start_date: '2020-01-01', + attachment_start_time: '00:00:00', + attachment_start_timestamp: '2020-01-01 00:00:00', + attachment_end_date: '2020-01-02', + attachment_end_time: '12:12:12', + attachment_end_timestamp: '2020-01-01 00:00:00', + critterbase_start_capture_id: 'start123', critterbase_end_capture_id: null, - critterbase_end_mortality_id: null + critterbase_end_mortality_id: null, + // device data + serial: '1234', + device_make_id: 1, + model: 'ModelX', + // critter data + critterbase_critter_id: 'critter123' } ]; - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01T00:00:00', - attachment_end: '2020-01-02T12:12:12', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentsForSurveyIdStub = sinon - 
.stub(DeploymentService.prototype, 'getDeploymentsForSurveyId') - .resolves(mockSIMSDeployments); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); + sinon.stub(TelemetryDeploymentService.prototype, 'getDeploymentsForSurvey').resolves(mockDeployments); + sinon.stub(TelemetryDeploymentService.prototype, 'getDeploymentsCount').resolves(1); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { - projectId: '55', - surveyId: '66' + projectId: '1', + surveyId: '2' }; const requestHandler = getDeploymentsInSurvey(); await requestHandler(mockReq, mockRes, mockNext); - expect(getDeploymentsForSurveyIdStub).calledOnceWith(66); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); expect(mockRes.json).to.have.been.calledOnceWith({ - deployments: [ - { - // BCTW properties - assignment_id: mockBCTWDeployments[0].assignment_id, - collar_id: mockBCTWDeployments[0].collar_id, - attachment_start_date: '2020-01-01', - attachment_start_time: '00:00:00', - attachment_end_date: '2020-01-02', - attachment_end_time: '12:12:12', - bctw_deployment_id: mockBCTWDeployments[0].deployment_id, - device_id: mockBCTWDeployments[0].device_id, - device_make: mockBCTWDeployments[0].device_make, - device_model: mockBCTWDeployments[0].device_model, - frequency: mockBCTWDeployments[0].frequency, - frequency_unit: mockBCTWDeployments[0].frequency_unit, - // SIMS properties - deployment_id: mockSIMSDeployments[0].deployment_id, - critter_id: mockSIMSDeployments[0].critter_id, - critterbase_critter_id: mockSIMSDeployments[0].critterbase_critter_id, - critterbase_start_capture_id: mockSIMSDeployments[0].critterbase_start_capture_id, - critterbase_end_capture_id: mockSIMSDeployments[0].critterbase_end_capture_id, - critterbase_end_mortality_id: mockSIMSDeployments[0].critterbase_end_mortality_id - } - ], - bad_deployments: [] - }); - expect(mockRes.status).calledOnceWith(200); - 
expect(mockDBConnection.release).to.have.been.calledOnce; - }); - - it('returns early an empty array if no SIMS deployment records for survey', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockSIMSDeployments: SurveyDeployment[] = []; // no SIMS deployment records - - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentsForSurveyIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentsForSurveyId') - .resolves(mockSIMSDeployments); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '55', - surveyId: '66' - }; - - const requestHandler = getDeploymentsInSurvey(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(getDeploymentsForSurveyIdStub).calledOnceWith(66); - expect(getDeploymentsByIdsStub).not.to.have.been.called; - expect(mockRes.json).calledOnceWith({ deployments: [], bad_deployments: [] }); - expect(mockRes.status).calledOnceWith(200); - expect(mockDBConnection.release).to.have.been.calledOnce; - }); - - it('returns bad deployment records if more than 1 active deployment found in BCTW for a single SIMS deployment record', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - sinon.stub(db, 
'getDBConnection').returns(mockDBConnection); - - const mockSIMSDeployments = [ - { - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', - critterbase_end_capture_id: null, - critterbase_end_mortality_id: null - } - ]; - - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - }, - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentsForSurveyIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentsForSurveyId') - .resolves(mockSIMSDeployments); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '55', - surveyId: '66' - }; - - const requestHandler = getDeploymentsInSurvey(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(getDeploymentsForSurveyIdStub).calledOnceWith(66); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); - expect(mockRes.json).calledOnceWith({ - deployments: [], - bad_deployments: [ - { - name: 
'BCTW Data Error', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - data: { - sims_deployment_id: 3, - bctw_deployment_id: '444' - } - } - ] - }); - expect(mockRes.status).calledOnceWith(200); - expect(mockDBConnection.release).to.have.been.calledOnce; - }); - - it('returns bad deployment records if no active deployment found in BCTW for a single SIMS deployment record', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockSIMSDeployments = [ - { - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', - critterbase_end_capture_id: null, - critterbase_end_mortality_id: null + deployments: mockDeployments, + count: 1, + pagination: { + total: 1, + per_page: 1, + current_page: 1, + last_page: 1, + sort: undefined, + order: undefined } - ]; - - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444_no_match', // different deployment ID - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentsForSurveyIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentsForSurveyId') - .resolves(mockSIMSDeployments); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '55', - surveyId: '66' - }; - - const requestHandler = 
getDeploymentsInSurvey(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(getDeploymentsForSurveyIdStub).calledOnceWith(66); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); - expect(mockRes.json).calledOnceWith({ - deployments: [], - bad_deployments: [ - { - name: 'BCTW Data Error', - message: 'No active deployments found for deployment ID, when one should exist.', - data: { - sims_deployment_id: 3, - bctw_deployment_id: '444' - } - } - ] }); expect(mockRes.status).calledOnceWith(200); expect(mockDBConnection.release).to.have.been.calledOnce; @@ -330,32 +76,17 @@ describe('getDeploymentsInSurvey', () => { const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockSIMSDeployments = [ - { - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', - critterbase_end_capture_id: null, - critterbase_end_mortality_id: null - } - ]; - const mockError = new Error('Test error'); const getDeploymentsForSurveyIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentsForSurveyId') - .resolves(mockSIMSDeployments); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') + .stub(TelemetryDeploymentService.prototype, 'getDeploymentsForSurvey') .rejects(mockError); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { - projectId: '55', - surveyId: '66' + projectId: '1', + surveyId: '2' }; const requestHandler = getDeploymentsInSurvey(); @@ -364,9 +95,8 @@ describe('getDeploymentsInSurvey', () => { await requestHandler(mockReq, mockRes, mockNext); expect.fail(); } catch (actualError) { + expect(getDeploymentsForSurveyIdStub).calledOnce; expect(actualError).to.equal(mockError); - expect(getDeploymentsForSurveyIdStub).calledOnceWith(66); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); 
expect(mockDBConnection.release).to.have.been.calledOnce; } }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.ts index 5ce785b665..156d76c672 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/index.ts @@ -1,17 +1,19 @@ -import dayjs from 'dayjs'; import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { DefaultDateFormat, DefaultTimeFormat } from '../../../../../../constants/dates'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../database/db'; -import { getDeploymentSchema } from '../../../../../../openapi/schemas/deployment'; -import { WarningSchema, warningSchema } from '../../../../../../openapi/schemas/warning'; +import { + paginationRequestQueryParamSchema, + paginationResponseSchema +} from '../../../../../../openapi/schemas/pagination'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; -import { BctwDeploymentService } from '../../../../../../services/bctw-service/bctw-deployment-service'; -import { ICritterbaseUser } from '../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../services/deployment-service'; -import { isFeatureFlagPresent } from '../../../../../../utils/feature-flag-utils'; +import { TelemetryDeploymentService } from '../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getLogger } from '../../../../../../utils/logger'; +import { + ensureCompletePaginationOptions, + makePaginationOptionsFromRequest, + makePaginationResponse +} from '../../../../../../utils/pagination'; const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/index'); @@ -39,8 
+41,8 @@ export const GET: Operation = [ ]; GET.apiDoc = { - description: 'Returns information about all deployments under this survey.', - tags: ['deployment', 'bctw'], + description: 'Gets all deployments in a survey.', + tags: ['deployment'], security: [ { Bearer: [] @@ -64,7 +66,8 @@ GET.apiDoc = { minimum: 1 }, required: true - } + }, + ...paginationRequestQueryParamSchema ], responses: { 200: { @@ -77,12 +80,153 @@ GET.apiDoc = { deployments: { title: 'Deployments', type: 'array', - items: getDeploymentSchema + items: { + title: 'Deployment', + type: 'object', + additionalProperties: false, + required: [ + // deployment data + 'deployment_id', + 'survey_id', + 'critter_id', + 'device_id', + 'device_key', + 'frequency', + 'frequency_unit_id', + 'attachment_start_date', + 'attachment_start_time', + 'attachment_start_timestamp', + 'attachment_end_date', + 'attachment_end_time', + 'attachment_end_timestamp', + 'critterbase_start_capture_id', + 'critterbase_end_capture_id', + 'critterbase_end_mortality_id', + // device data + 'device_make_id', + 'model', + // critter data + 'critterbase_critter_id' + ], + properties: { + deployment_id: { + type: 'integer', + description: 'Id of the deployment in the Survey.' + }, + survey_id: { + type: 'integer', + minimum: 1 + }, + critter_id: { + type: 'integer', + minimum: 1, + description: 'Id of the critter in the Survey' + }, + device_id: { + type: 'integer', + description: 'Id of the device, as reported by users. Not unique.' + }, + device_key: { + type: 'string', + description: 'Generated: Device make + device serial.', + example: 'lotek:123456' + }, + frequency: { + type: 'number', + description: + 'The frequency of the device. Property "frequency_unit_id" must also be provided if this is provided.', + nullable: true + }, + frequency_unit_id: { + type: 'integer', + description: + 'The ID of a frequency unit code. 
Property "frequency" must also be provided if this is provided.', + minimum: 1, + nullable: true + }, + attachment_start_date: { + type: 'string', + description: 'start date of the deployment.', + example: '2021-01-01' + }, + attachment_start_time: { + type: 'string', + description: 'start time of the deployment.', + example: '12:00:00', + nullable: true + }, + attachment_start_timestamp: { + type: 'string', + description: 'Generated: start timestamp of the deployment.', + example: '2021-01-01 12:00:00' + }, + attachment_end_date: { + type: 'string', + description: 'End date of the deployment.', + example: '2021-01-01', + nullable: true + }, + attachment_end_time: { + type: 'string', + description: 'End time of the deployment.', + example: '12:00:00', + nullable: true + }, + attachment_end_timestamp: { + type: 'string', + description: 'Generated: end timestamp of the deployment.', + example: '2021-01-01 12:00:00', + nullable: true + }, + critterbase_start_capture_id: { + type: 'string', + description: + 'Critterbase capture event. The capture event during which the device was attached to the animal.', + format: 'uuid', + nullable: true + }, + critterbase_end_capture_id: { + type: 'string', + description: + 'Critterbase capture event. The capture event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', + nullable: true + }, + critterbase_end_mortality_id: { + type: 'string', + description: + 'Critterbase mortality event. The mortality event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', + nullable: true + }, + // device data + serial: { + type: 'string', + description: 'Serial number of the device.' 
+ }, + device_make_id: { + type: 'integer', + minimum: 1, + nullable: true + }, + model: { + type: 'string', + nullable: true + }, + // critter data + critterbase_critter_id: { + type: 'string', + format: 'uuid', + description: 'Id of the critter in Critterbase.' + } + } + } }, - bad_deployments: { - type: 'array', - items: warningSchema - } + count: { + type: 'number', + description: 'Count of telemetry deployments in the respective survey.' + }, + pagination: { ...paginationResponseSchema } } } } @@ -109,6 +253,12 @@ GET.apiDoc = { } }; +/** + * Gets all deployments in a survey. + * + * @export + * @return {*} {RequestHandler} + */ export function getDeploymentsInSurvey(): RequestHandler { return async (req, res) => { const surveyId = Number(req.params.surveyId); @@ -116,123 +266,28 @@ export function getDeploymentsInSurvey(): RequestHandler { const connection = getDBConnection(req.keycloak_token); try { - await connection.open(); + const paginationOptions = makePaginationOptionsFromRequest(req); - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; + await connection.open(); - const deploymentService = new DeploymentService(connection); - const bctwDeploymentService = new BctwDeploymentService(user); + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - // Fetch deployments from the deployment service for the given surveyId - const surveyDeployments = await deploymentService.getDeploymentsForSurveyId(surveyId); + const [deployments, deploymentsCount] = await Promise.all([ + telemetryDeploymentService.getDeploymentsForSurvey( + surveyId, + [], + ensureCompletePaginationOptions(paginationOptions) + ), + telemetryDeploymentService.getDeploymentsCount(surveyId) + ]); await connection.commit(); - // Extract deployment IDs from survey deployments - const deploymentIds = surveyDeployments.map((deployment) => deployment.bctw_deployment_id); - - // Return early if 
there are no deployments - if (!deploymentIds.length) { - // Return an empty array if there are no deployments in the survey - return res.status(200).json({ deployments: [], bad_deployments: [] }); - } - - // Fetch additional deployment details from BCTW service - const bctwDeployments = await bctwDeploymentService.getDeploymentsByIds(deploymentIds); - - const surveyDeploymentsWithBctwData = []; - - // Track deployments that exist in SIMS but have incorrect data in BCTW - const badDeployments: WarningSchema[] = []; - - // For each SIMS survey deployment record, find the matching BCTW deployment record. - // We expect exactly 1 matching record, otherwise we throw an error. - // More than 1 matching active record indicates an error in the BCTW data. - for (const surveyDeployment of surveyDeployments) { - const matchingBctwDeployments = bctwDeployments.filter( - (deployment) => deployment.deployment_id === surveyDeployment.bctw_deployment_id - ); - - // TODO: If the feature flag exists, then we allow multiple active deployments to exist for the same deployment - // ID (when normally we would return a bad deployment). 
- if (!isFeatureFlagPresent(['API_FF_DISABLE_MULTIPLE_ACTIVE_DEPLOYMENTS_CHECK'])) { - if (matchingBctwDeployments.length > 1) { - defaultLog.warn({ - label: 'getDeploymentById', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - }); - badDeployments.push({ - name: 'BCTW Data Error', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - data: { - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - } - }); - // Don't continue processing this deployment - continue; - } - } - - if (matchingBctwDeployments.length === 0) { - defaultLog.warn({ - label: 'getDeploymentById', - message: 'No active deployments found for deployment ID, when one should exist.', - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - }); - - badDeployments.push({ - name: 'BCTW Data Error', - message: 'No active deployments found for deployment ID, when one should exist.', - data: { - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - } - }); - - // Don't continue processing this deployment - continue; - } - - surveyDeploymentsWithBctwData.push({ - // BCTW properties - assignment_id: matchingBctwDeployments[0].assignment_id, - collar_id: matchingBctwDeployments[0].collar_id, - attachment_start_date: matchingBctwDeployments[0].attachment_start - ? dayjs(matchingBctwDeployments[0].attachment_start).format(DefaultDateFormat) - : null, - attachment_start_time: matchingBctwDeployments[0].attachment_start - ? dayjs(matchingBctwDeployments[0].attachment_start).format(DefaultTimeFormat) - : null, - attachment_end_date: matchingBctwDeployments[0].attachment_end - ? 
dayjs(matchingBctwDeployments[0].attachment_end).format(DefaultDateFormat) - : null, - attachment_end_time: matchingBctwDeployments[0].attachment_end - ? dayjs(matchingBctwDeployments[0].attachment_end).format(DefaultTimeFormat) - : null, - bctw_deployment_id: matchingBctwDeployments[0].deployment_id, - device_id: matchingBctwDeployments[0].device_id, - device_make: matchingBctwDeployments[0].device_make, - device_model: matchingBctwDeployments[0].device_model, - frequency: matchingBctwDeployments[0].frequency, - frequency_unit: matchingBctwDeployments[0].frequency_unit, - // SIMS properties - deployment_id: surveyDeployment.deployment_id, - critter_id: surveyDeployment.critter_id, - critterbase_critter_id: surveyDeployment.critterbase_critter_id, - critterbase_start_capture_id: surveyDeployment.critterbase_start_capture_id, - critterbase_end_capture_id: surveyDeployment.critterbase_end_capture_id, - critterbase_end_mortality_id: surveyDeployment.critterbase_end_mortality_id - }); - } - - return res.status(200).json({ deployments: surveyDeploymentsWithBctwData, bad_deployments: badDeployments }); + return res.status(200).json({ + deployments: deployments, + count: deploymentsCount, + pagination: makePaginationResponse(deploymentsCount, paginationOptions) + }); } catch (error) { defaultLog.error({ label: 'getDeploymentsInSurvey', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.test.ts new file mode 100644 index 0000000000..3d52320ac8 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.test.ts @@ -0,0 +1,57 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { getTelemetryForDeployments } from '.'; +import * as db from '../../../../../../../database/db'; +import { TelemetryVendorService } from 
'../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; + +describe('getTelemetryForDeployments', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getTelemetryForDeployments', () => { + it('should return telemetry data for a single deployment', async () => { + const mockDBConnection = getMockDBConnection({ + commit: sinon.stub(), + release: sinon.stub(), + open: sinon.stub(), + rollback: sinon.stub() + }); + + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const serviceStub = sinon.stub(TelemetryVendorService.prototype, 'getTelemetryForDeployments').resolves([ + { + telemetry_id: 1 + } + ] as any); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + mockReq.body = { + deployment_ids: [3, 4] + }; + + const requestHandler = getTelemetryForDeployments(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockDBConnection.open).to.have.been.calledOnce; + + expect(serviceStub).to.have.been.calledOnceWithExactly(2, [3, 4]); + expect(mockRes.json).to.have.been.calledOnceWith({ telemetry: [{ telemetry_id: 1 }] }); + expect(mockRes.status).calledOnceWithExactly(200); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.not.have.been.called; + }); + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.ts new file mode 100644 index 0000000000..3ce6d7041a --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index.ts @@ -0,0 +1,160 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, 
SYSTEM_ROLE } from '../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../database/db'; +import { TelemetrySchema } from '../../../../../../../openapi/schemas/telemetry'; +import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/index'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getTelemetryForDeployments() +]; + +GET.apiDoc = { + description: 'Get all telemetry for a list of deployments.', + tags: ['deployment', 'telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + description: 'Array of one or more deployment IDs to retrieve telemetry for.', + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + additionalProperties: false, + required: ['deployment_ids'], + properties: { + deployment_ids: { + type: 'array', + items: { + type: 'integer', + minimum: 1 + }, + minItems: 1 + } + } + } + } + } + }, + responses: { + 200: { + description: 'Responds with all telemetry for deployments. 
Includes both manual and vendor telemetry.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + telemetry: { + type: 'array', + items: { + properties: { + schema: TelemetrySchema + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Gets all telemetry for a list of deployments. + * + * @export + * @return {*} {RequestHandler} + */ +export function getTelemetryForDeployments(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deploymentIds: number[] = req.body.deployment_ids; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + const telemetry = await telemetryVendorService.getTelemetryForDeployments(surveyId, deploymentIds); + + await connection.commit(); + + return res.status(200).json({ telemetry: telemetry }); + } catch (error) { + defaultLog.error({ label: 'getTelemetryForDeployments', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.test.ts new file mode 100644 index 0000000000..e32243af91 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.test.ts @@ -0,0 +1,54 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import * as db from '../../../../../../../../database/db'; +import { TelemetryVendorService } from 
'../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../__mocks__/db'; +import { bulkDeleteManualTelemetry } from './delete'; + +describe('delete', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('bulkDeleteManualTelemetry', () => { + it('should return status 200', async () => { + const mockDBConnection = getMockDBConnection({ + commit: sinon.stub(), + release: sinon.stub(), + open: sinon.stub(), + rollback: sinon.stub() + }); + + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const serviceStub = sinon.stub(TelemetryVendorService.prototype, 'bulkDeleteManualTelemetry'); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + mockReq.body = { + telemetry_manual_ids: ['uuid1', 'uuid2'] + }; + + const requestHandler = bulkDeleteManualTelemetry(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockDBConnection.open).to.have.been.calledOnce; + + expect(serviceStub).to.have.been.calledOnceWithExactly(2, ['uuid1', 'uuid2']); + + expect(mockRes.status).calledOnceWithExactly(200); + expect(mockRes.send).calledOnceWithExactly(); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.not.have.been.called; + }); + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.ts new file mode 100644 index 0000000000..5ab62e5d9a --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete.ts @@ -0,0 +1,142 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from 
'../../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../../database/db'; +import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/delete'); + +export const POST: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + bulkDeleteManualTelemetry() +]; + +POST.apiDoc = { + description: 'Bulk delete manual telemetry records.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + description: 'Manual telemetry bulk delete payload.', + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + additionalProperties: false, + required: ['telemetry_manual_ids'], + properties: { + telemetry_manual_ids: { + type: 'array', + items: { + type: 'string', + format: 'uuid' + }, + minItems: 1 + } + } + } + } + } + }, + responses: { + 200: { + description: 'Responds successfully if the telemetry records were deleted.' 
+ }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Bulk delete manual telemetry records. + * + * @export + * @return {*} {RequestHandler} + */ +export function bulkDeleteManualTelemetry(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const telemetryManualIds: string[] = req.body.telemetry_manual_ids; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + await telemetryVendorService.bulkDeleteManualTelemetry(surveyId, telemetryManualIds); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'bulkDeleteManualTelemetry', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.test.ts new file mode 100644 index 0000000000..8339b5273f --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.test.ts @@ -0,0 +1,118 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { bulkCreateManualTelemetry, bulkUpdateManualTelemetry } from '.'; +import * as db from '../../../../../../../../database/db'; +import { TelemetryVendorService } from '../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../__mocks__/db'; + +describe('index', () 
=> { + afterEach(() => { + sinon.restore(); + }); + + describe('bulkCreateManualTelemetry', () => { + it('should return status 200', async () => { + const mockDBConnection = getMockDBConnection({ + commit: sinon.stub(), + release: sinon.stub(), + open: sinon.stub(), + rollback: sinon.stub() + }); + + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const serviceStub = sinon.stub(TelemetryVendorService.prototype, 'bulkCreateManualTelemetry'); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + const mockTelemetry = [ + { + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]; + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + mockReq.body = { + telemetry: mockTelemetry + }; + + const requestHandler = bulkCreateManualTelemetry(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockDBConnection.open).to.have.been.calledOnce; + + expect(serviceStub).to.have.been.calledOnceWithExactly(2, mockTelemetry); + + expect(mockRes.status).calledOnceWithExactly(201); + expect(mockRes.send).calledOnceWithExactly(); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.not.have.been.called; + }); + }); + + describe('bulkUpdateManualTelemetry', () => { + { + it('should return status 200', async () => { + const mockDBConnection = getMockDBConnection({ + commit: sinon.stub(), + release: sinon.stub(), + open: sinon.stub(), + rollback: sinon.stub() + }); + + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const serviceStub = sinon.stub(TelemetryVendorService.prototype, 'bulkUpdateManualTelemetry'); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + const mockTelemetry = [ + { + telemetry_manual_id: 'uuid', + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' 
+ } + ]; + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + mockReq.body = { + telemetry: mockTelemetry + }; + + const requestHandler = bulkUpdateManualTelemetry(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockDBConnection.open).to.have.been.calledOnce; + + expect(serviceStub).to.have.been.calledOnceWithExactly(2, mockTelemetry); + + expect(mockRes.status).calledOnceWithExactly(200); + expect(mockRes.send).calledOnceWithExactly(); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.not.have.been.called; + }); + } + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.ts new file mode 100644 index 0000000000..29b120946b --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index.ts @@ -0,0 +1,336 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../../constants/roles'; +import { TelemetryManualRecord } from '../../../../../../../../database-models/telemetry_manual'; +import { getDBConnection } from '../../../../../../../../database/db'; +import { CreateManualTelemetry } from '../../../../../../../../repositories/telemetry-repositories/telemetry-manual-repository.interface'; +import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/telemetry/manual/index'); + +export const POST: Operation = [ + 
authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + bulkCreateManualTelemetry() +]; + +POST.apiDoc = { + description: 'Bulk create manual telemetry records.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + description: 'Manual telemetry bulk create payload.', + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + additionalProperties: false, + required: ['telemetry'], + properties: { + telemetry: { + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: ['deployment_id', 'latitude', 'longitude', 'acquisition_date', 'transmission_date'], + properties: { + deployment_id: { + type: 'integer', + minimum: 1 + }, + latitude: { + type: 'number', + minimum: -90, + maximum: 90 + }, + longitude: { + type: 'number', + minimum: -180, + maximum: 180 + }, + acquisition_date: { + type: 'string' + }, + transmission_date: { + type: 'string', + nullable: true + } + } + }, + minItems: 1 + } + } + } + } + } + }, + responses: { + 201: { + description: 'Responds successfully if the telemetry records were created.' 
+ }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Bulk create manual telemetry records. + * + * @export + * @return {*} {RequestHandler} + */ +export function bulkCreateManualTelemetry(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const telemetry: CreateManualTelemetry[] = req.body.telemetry; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + await telemetryVendorService.bulkCreateManualTelemetry(surveyId, telemetry); + + await connection.commit(); + + return res.status(201).send(); + } catch (error) { + defaultLog.error({ label: 'bulkCreateManualTelemetry', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} + +export const PUT: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + bulkUpdateManualTelemetry() +]; + +PUT.apiDoc = { + description: 'Bulk update manual telemetry records.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + 
requestBody: { + description: 'Manual telemetry bulk update payload.', + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + additionalProperties: false, + required: ['telemetry'], + properties: { + telemetry: { + type: 'array', + items: { + type: 'object', + additionalProperties: false, + required: [ + 'telemetry_manual_id', + 'deployment_id', + 'latitude', + 'longitude', + 'acquisition_date', + 'transmission_date' + ], + properties: { + telemetry_manual_id: { + type: 'string', + format: 'uuid' + }, + deployment_id: { + type: 'integer', + minimum: 1 + }, + latitude: { + type: 'number', + minimum: -90, + maximum: 90 + }, + longitude: { + type: 'number', + minimum: -180, + maximum: 180 + }, + acquisition_date: { + type: 'string' + }, + transmission_date: { + type: 'string', + nullable: true + } + } + }, + minItems: 1 + } + } + } + } + } + }, + responses: { + 200: { + description: 'Responds successfully if the telemetry records were updated.' + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Bulk update manual telemetry records. 
+ * + * @export + * @return {*} {RequestHandler} + */ +export function bulkUpdateManualTelemetry(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const telemetry: TelemetryManualRecord[] = req.body.telemetry; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + await telemetryVendorService.bulkUpdateManualTelemetry(surveyId, telemetry); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'bulkUpdateManualTelemetry', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.test.ts index 56304587a0..c53e5acff8 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.test.ts @@ -2,14 +2,7 @@ import { expect } from 'chai'; import sinon from 'sinon'; import { deleteDeployment, getDeploymentById, updateDeployment } from '.'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/http-error'; -import { - BctwDeploymentRecordWithDeviceMeta, - BctwDeploymentService -} from '../../../../../../../services/bctw-service/bctw-deployment-service'; -import { BctwDeviceService } from '../../../../../../../services/bctw-service/bctw-device-service'; -import { CritterbaseService, ICapture } from '../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from 
'../../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; describe('getDeploymentById', () => { @@ -19,38 +12,32 @@ describe('getDeploymentById', () => { it('Gets an existing deployment', async () => { const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockRemoveDeployment = sinon.stub(DeploymentService.prototype, 'getDeploymentById').resolves({ - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockDeployment = { + deployment_id: 2, + survey_id: 3, + critter_id: 4, + device_id: 5, + device_key: 'lotek:12345', + frequency: 100, + frequency_unit_id: 1, + attachment_start_date: '2021-01-01', + attachment_start_time: '00:00', + attachment_start_timestamp: '2021-01-01 00:00', + attachment_end_date: '2021-01-02', + attachment_end_time: '00:00', + attachment_end_timestamp: '2021-01-01 00:00', + critterbase_start_capture_id: '123-456-789', critterbase_end_capture_id: null, - critterbase_end_mortality_id: null - }); - const mockBctwService = sinon.stub(BctwDeploymentService.prototype, 'getDeploymentsByIds').resolves([ - { - critter_id: '333', - assignment_id: '666', - collar_id: '777', - attachment_start: '2021-01-01', - attachment_end: '2021-01-02', - deployment_id: '444', - device_id: 888, - created_at: '2021-01-01', - created_by_user_id: '999', - updated_at: null, - updated_by_user_id: null, - valid_from: '2021-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]); + critterbase_end_mortality_id: null, + serial: '123', + device_make_id: 1, + critterbase_critter_id: 'uuid', + model: 'model' + 
}; + + sinon.stub(TelemetryDeploymentService.prototype, 'getDeploymentById').resolves(mockDeployment); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); @@ -64,49 +51,26 @@ describe('getDeploymentById', () => { await requestHandler(mockReq, mockRes, mockNext); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(mockRemoveDeployment).to.have.been.calledOnce; - expect(mockBctwService).to.have.been.calledOnce; + expect(mockRes.json).to.have.been.calledWith({ deployment: mockDeployment }); expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; }); - it('throws 400 error if no SIMS deployment record matches provided deployment ID', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentByIdStub = sinon.stub(DeploymentService.prototype, 'getDeploymentById').resolves(); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); + const mockError = new Error('Test error'); + + sinon.stub(TelemetryDeploymentService.prototype, 'getDeploymentById').rejects(mockError); const { mockReq, mockRes, 
mockNext } = getRequestHandlerMocks(); mockReq.params = { - projectId: '55', - surveyId: '66', - deploymentId: '77' + projectId: '1', + surveyId: '2', + deploymentId: '3' }; const requestHandler = getDeploymentById(); @@ -115,317 +79,115 @@ describe('getDeploymentById', () => { await requestHandler(mockReq, mockRes, mockNext); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Deployment ID does not exist.'); - expect((actualError as HTTPError).status).to.equal(400); - - expect(getDeploymentByIdStub).calledOnceWith(77); - expect(getDeploymentsByIdsStub).not.to.have.been.called; + expect(actualError).to.equal(mockError); expect(mockDBConnection.release).to.have.been.calledOnce; } }); +}); - it('returns bad deployment record if more than 1 active deployment found in BCTW for the SIMS deployment record', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(mockDBConnection); +describe('updateDeployment', () => { + afterEach(() => { + sinon.restore(); + }); - const mockSIMSDeployment = { - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', - critterbase_end_capture_id: null, - critterbase_end_mortality_id: null - }; + it('updates an existing deployment', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - 
device_model: 'model', - frequency: 1, - frequency_unit: 2 - }, - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444', - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentByIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentById') - .resolves(mockSIMSDeployment); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); + sinon.stub(TelemetryDeploymentService.prototype, 'updateDeployment').resolves(); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { - projectId: '55', - surveyId: '66', - deploymentId: '77' + projectId: '1', + surveyId: '2', + deploymentId: '3' }; - - const requestHandler = getDeploymentById(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(getDeploymentByIdStub).calledOnceWith(77); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); - expect(mockRes.json).calledOnceWith({ - deployment: null, - bad_deployment: { - name: 'BCTW Data Error', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - data: { - sims_deployment_id: 3, - bctw_deployment_id: '444' - } - } - }); - expect(mockRes.status).calledOnceWith(200); - expect(mockDBConnection.release).to.have.been.calledOnce; - }); - - it('returns bad deployment record if no active deployment found in BCTW for the SIMS deployment record', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockSIMSDeployment = { - deployment_id: 3, - 
critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', + mockReq.body = { + critter_id: 4, + device_id: 5, + frequency: 100, + frequency_unit_id: 1, + attachment_start_date: '2021-01-01', + attachment_start_time: '00:00', + attachment_end_date: '2021-01-02', + attachment_end_time: '00:00', + critterbase_start_capture_id: '123-456-789', critterbase_end_capture_id: null, critterbase_end_mortality_id: null }; - const mockBCTWDeployments: BctwDeploymentRecordWithDeviceMeta[] = [ - { - critter_id: '333', - assignment_id: 'assignment1', - collar_id: 'collar1', - attachment_start: '2020-01-01', - attachment_end: '2020-01-02', - deployment_id: '444_no_match', // different deployment ID - device_id: 123, - created_at: '2020-01-01', - created_by_user_id: 'user1', - updated_at: '2020-01-01', - updated_by_user_id: 'user1', - valid_from: '2020-01-01', - valid_to: null, - device_make: 17, - device_model: 'model', - frequency: 1, - frequency_unit: 2 - } - ]; - - const getDeploymentByIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentById') - .resolves(mockSIMSDeployment); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .resolves(mockBCTWDeployments); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '55', - surveyId: '66', - deploymentId: '77' - }; - - const requestHandler = getDeploymentById(); + const requestHandler = updateDeployment(); await requestHandler(mockReq, mockRes, mockNext); - expect(getDeploymentByIdStub).calledOnceWith(77); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); - expect(mockRes.json).calledOnceWith({ - deployment: null, - bad_deployment: { - name: 'BCTW Data Error', - message: 'No active deployments found for deployment ID, when one should exist.', - data: { - sims_deployment_id: 3, - bctw_deployment_id: '444' - } - } - }); - 
expect(mockRes.status).calledOnceWith(200); + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; expect(mockDBConnection.release).to.have.been.calledOnce; }); it('catches and re-throws errors', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockSIMSDeployment = { - deployment_id: 3, - critter_id: 2, - critterbase_critter_id: '333', - bctw_deployment_id: '444', - critterbase_start_capture_id: '555', - critterbase_end_capture_id: null, - critterbase_end_mortality_id: null - }; - - const mockError = new Error('Test error'); + const mockError = new Error('a test error'); - const getDeploymentByIdStub = sinon - .stub(DeploymentService.prototype, 'getDeploymentById') - .resolves(mockSIMSDeployment); - const getDeploymentsByIdsStub = sinon - .stub(BctwDeploymentService.prototype, 'getDeploymentsByIds') - .rejects(mockError); + sinon.stub(TelemetryDeploymentService.prototype, 'updateDeployment').rejects(mockError); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { - projectId: '55', - surveyId: '66', - deploymentId: '77' + projectId: '1', + surveyId: '2', + deploymentId: '3' }; - const requestHandler = getDeploymentById(); - + const requestHandler = updateDeployment(); try { await requestHandler(mockReq, mockRes, mockNext); expect.fail(); } catch (actualError) { expect(actualError).to.equal(mockError); - expect(getDeploymentByIdStub).calledOnceWith(77); - expect(getDeploymentsByIdsStub).calledOnceWith(['444']); expect(mockDBConnection.release).to.have.been.calledOnce; } }); }); -describe('updateDeployment', () => { +describe('deleteDeployment', () => { afterEach(() => { sinon.restore(); }); - it('updates an existing deployment', async () => { - const mockDBConnection = 
getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockCapture: ICapture = { - capture_id: '111', - critter_id: '222', - capture_method_id: null, - capture_location_id: '333', - release_location_id: null, - capture_date: '2021-01-01', - capture_time: '12:00:00', - release_date: null, - release_time: null, - capture_comment: null, - release_comment: null - }; + it('deletes an existing deployment', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockBctwDeploymentResponse = [ - { - assignment_id: '666', - collar_id: '777', - critter_id: '333', - created_at: '2021-01-01', - created_by_user_id: '999', - updated_at: null, - updated_by_user_id: null, - valid_from: '2021-01-01', - valid_to: null, - attachment_start: '2021-01-01', - attachment_end: '2021-01-02', - deployment_id: '444' - } - ]; - - const updateDeploymentStub = sinon.stub(DeploymentService.prototype, 'updateDeployment').resolves(); - const getCaptureByIdStub = sinon.stub(CritterbaseService.prototype, 'getCaptureById').resolves(mockCapture); - const updateBctwDeploymentStub = sinon - .stub(BctwDeploymentService.prototype, 'updateDeployment') - .resolves(mockBctwDeploymentResponse); - const updateCollarStub = sinon.stub(BctwDeviceService.prototype, 'updateCollar').resolves(); + sinon.stub(TelemetryDeploymentService.prototype, 'deleteDeployment').resolves(); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const requestHandler = updateDeployment(); + mockReq.params = { + projectId: '1', + surveyId: '2', + deploymentId: '3' + }; + + const requestHandler = deleteDeployment(); await requestHandler(mockReq, mockRes, mockNext); - expect(getDBConnectionStub).to.have.been.calledOnce; - expect(updateDeploymentStub).to.have.been.calledOnce; - 
expect(getCaptureByIdStub).to.have.been.calledOnce; - expect(updateBctwDeploymentStub).to.have.been.calledOnce; - expect(updateCollarStub).to.have.been.calledOnce; expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; }); it('catches and re-throws errors', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); const mockError = new Error('a test error'); - const updateDeploymentStub = sinon.stub(DeploymentService.prototype, 'updateDeployment').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - const requestHandler = updateDeployment(); - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(actualError).to.equal(mockError); - expect(updateDeploymentStub).to.have.been.calledOnce; - } - }); -}); -describe('deleteDeployment', () => { - afterEach(() => { - sinon.restore(); - }); - - it('deletes an existing deployment', async () => { - const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); - const getDBConnectionStub = sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const deleteDeploymentStub = sinon - .stub(DeploymentService.prototype, 'deleteDeployment') - .resolves({ bctw_deployment_id: '444' }); - const bctwDeleteDeploymentStub = sinon.stub(BctwDeploymentService.prototype, 'deleteDeployment'); + sinon.stub(TelemetryDeploymentService.prototype, 'deleteDeployment').rejects(mockError); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); @@ -436,12 +198,13 @@ describe('deleteDeployment', () => { }; const requestHandler = deleteDeployment(); - - await requestHandler(mockReq, mockRes, mockNext); - - 
expect(getDBConnectionStub).to.have.been.calledOnce; - expect(deleteDeploymentStub).to.have.been.calledOnce; - expect(bctwDeleteDeploymentStub).to.have.been.calledOnce; - expect(mockRes.status).to.have.been.calledWith(200); + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + } }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.ts index f4bfe664c1..fe44d6f710 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index.ts @@ -1,24 +1,11 @@ -import { AxiosError } from 'axios'; -import dayjs from 'dayjs'; import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { DefaultDateFormat, DefaultTimeFormat } from '../../../../../../../constants/dates'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/http-error'; -import { getDeploymentSchema } from '../../../../../../../openapi/schemas/deployment'; -import { warningSchema } from '../../../../../../../openapi/schemas/warning'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { BctwDeploymentService } from '../../../../../../../services/bctw-service/bctw-deployment-service'; -import { BctwDeviceService } from '../../../../../../../services/bctw-service/bctw-device-service'; -import { getBctwUser } from '../../../../../../../services/bctw-service/bctw-service'; -import { - 
CritterbaseService, - getCritterbaseUser, - ICritterbaseUser -} from '../../../../../../../services/critterbase-service'; -import { DeploymentService } from '../../../../../../../services/deployment-service'; +import { TelemetryDeploymentService } from '../../../../../../../services/telemetry-services/telemetry-deployment-service'; import { getLogger } from '../../../../../../../utils/logger'; +import { numberOrNull } from '../../../../../../../utils/string-utils'; const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/index'); @@ -46,8 +33,8 @@ export const GET: Operation = [ ]; GET.apiDoc = { - description: 'Returns information about a specific deployment.', - tags: ['deployment', 'bctw'], + description: 'Get a deployment.', + tags: ['deployment'], security: [ { Bearer: [] @@ -75,7 +62,6 @@ GET.apiDoc = { { in: 'path', name: 'deploymentId', - description: 'SIMS deployment ID', schema: { type: 'integer', minimum: 1 @@ -85,21 +71,154 @@ GET.apiDoc = { ], responses: { 200: { - description: 'Responds with information about a deployment under this survey.', + description: 'Responds with information about a deployment.', content: { 'application/json': { schema: { type: 'object', - required: ['deployment', 'bad_deployment'], + required: ['deployment'], additionalProperties: false, properties: { deployment: { - ...getDeploymentSchema, - nullable: true - }, - bad_deployment: { - ...warningSchema, - nullable: true + title: 'Deployment', + type: 'object', + additionalProperties: false, + required: [ + 'deployment_id', + 'survey_id', + 'critter_id', + 'device_id', + 'device_key', + 'frequency', + 'frequency_unit_id', + 'attachment_start_date', + 'attachment_start_time', + 'attachment_start_timestamp', + 'attachment_end_date', + 'attachment_end_time', + 'attachment_end_timestamp', + 'critterbase_start_capture_id', + 'critterbase_end_capture_id', + 'critterbase_end_mortality_id', + // device data + 'serial', + 'device_make_id', + 
'model', + // critter data + 'critterbase_critter_id' + ], + properties: { + deployment_id: { + type: 'integer', + description: 'Id of the deployment in the Survey.' + }, + survey_id: { + type: 'integer', + minimum: 1 + }, + critter_id: { + type: 'integer', + minimum: 1, + description: 'Id of the critter in the Survey' + }, + device_key: { + type: 'string', + description: 'Generated: Device make + device serial.', + example: 'lotek:123456' + }, + device_id: { + type: 'integer', + description: 'Id of the device, as reported by users. Not unique.' + }, + frequency: { + type: 'number', + description: + 'The frequency of the device. Property "frequency_unit_id" must also be provided if this is provided.', + nullable: true + }, + frequency_unit_id: { + type: 'integer', + description: + 'The ID of a frequency unit code. Property "frequency" must also be provided if this is provided.', + minimum: 1, + nullable: true + }, + attachment_start_date: { + type: 'string', + description: 'start date of the deployment.', + example: '2021-01-01' + }, + attachment_start_time: { + type: 'string', + description: 'start time of the deployment.', + example: '12:00:00', + nullable: true + }, + attachment_start_timestamp: { + type: 'string', + description: 'Generated: start timestamp of the deployment.', + example: '2021-01-01 12:00:00' + }, + attachment_end_date: { + type: 'string', + description: 'End date of the deployment.', + example: '2021-01-01', + nullable: true + }, + attachment_end_time: { + type: 'string', + description: 'End time of the deployment.', + example: '12:00:00', + nullable: true + }, + attachment_end_timestamp: { + type: 'string', + description: 'Generated: end timestamp of the deployment.', + example: '2021-01-01 12:00:00', + nullable: true + }, + critterbase_start_capture_id: { + type: 'string', + description: + 'Critterbase capture event. 
The capture event during which the device was attached to the animal.', + format: 'uuid', + nullable: true + }, + critterbase_end_capture_id: { + type: 'string', + description: + 'Critterbase capture event. The capture event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', + nullable: true + }, + critterbase_end_mortality_id: { + type: 'string', + description: + 'Critterbase mortality event. The mortality event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', + format: 'uuid', + nullable: true + }, + // device data + serial: { + type: 'string', + description: 'Serial number of the device.' + }, + device_make_id: { + type: 'integer', + minimum: 1, + nullable: true + }, + model: { + type: 'string', + nullable: true + }, + // critter data + critterbase_critter_id: { + type: 'string', + format: 'uuid', + description: 'Id of the critter in Critterbase.' + } + } } } } @@ -127,8 +246,15 @@ GET.apiDoc = { } }; +/** + * Gets a deployment. 
+ * + * @export + * @return {*} {RequestHandler} + */ export function getDeploymentById(): RequestHandler { return async (req, res) => { + const surveyId = Number(req.params.surveyId); const deploymentId = Number(req.params.deploymentId); const connection = getDBConnection(req.keycloak_token); @@ -136,113 +262,16 @@ export function getDeploymentById(): RequestHandler { try { await connection.open(); - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; - - const deploymentService = new DeploymentService(connection); - const bctwDeploymentService = new BctwDeploymentService(user); + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - // Fetch deployments from the deployment service for the given surveyId - const surveyDeployment = await deploymentService.getDeploymentById(deploymentId); + const deployment = await telemetryDeploymentService.getDeploymentById(surveyId, deploymentId); await connection.commit(); - // Return early if there are no deployments - if (!surveyDeployment) { - // Return 400 if the provided deployment ID does not exist - throw new HTTP400('Deployment ID does not exist.', [{ sims_deployment_id: deploymentId }]); - } - - // Fetch additional deployment details from BCTW service - const bctwDeployments = await bctwDeploymentService.getDeploymentsByIds([surveyDeployment.bctw_deployment_id]); - - // For the SIMS survey deployment record, find the matching BCTW deployment record. - // We expect exactly 1 matching record, otherwise we throw an error. - // More than 1 matching active record indicates an error in the BCTW data. 
- const matchingBctwDeployments = bctwDeployments.filter( - (deployment) => deployment.deployment_id === surveyDeployment.bctw_deployment_id - ); - - if (matchingBctwDeployments.length > 1) { - defaultLog.warn({ - label: 'getDeploymentById', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - }); - - const badDeployment = { - name: 'BCTW Data Error', - message: 'Multiple active deployments found for the same deployment ID, when only one should exist.', - data: { - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - } - }; - - // Don't continue processing this deployment - return res.status(200).json({ deployment: null, bad_deployment: badDeployment }); - } - - if (matchingBctwDeployments.length === 0) { - defaultLog.warn({ - label: 'getDeploymentById', - message: 'No active deployments found for deployment ID, when one should exist.', - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - }); - - const badDeployment = { - name: 'BCTW Data Error', - message: 'No active deployments found for deployment ID, when one should exist.', - data: { - sims_deployment_id: surveyDeployment.deployment_id, - bctw_deployment_id: surveyDeployment.bctw_deployment_id - } - }; - - // Don't continue processing this deployment - return res.status(200).json({ deployment: null, bad_deployment: badDeployment }); - } - - const surveyDeploymentWithBctwData = { - // BCTW properties - assignment_id: matchingBctwDeployments[0].assignment_id, - collar_id: matchingBctwDeployments[0].collar_id, - attachment_start_date: matchingBctwDeployments[0].attachment_start - ? 
dayjs(matchingBctwDeployments[0].attachment_start).format(DefaultDateFormat) - : null, - attachment_start_time: matchingBctwDeployments[0].attachment_start - ? dayjs(matchingBctwDeployments[0].attachment_start).format(DefaultTimeFormat) - : null, - attachment_end_date: matchingBctwDeployments[0].attachment_end - ? dayjs(matchingBctwDeployments[0].attachment_end).format(DefaultDateFormat) - : null, - attachment_end_time: matchingBctwDeployments[0].attachment_end - ? dayjs(matchingBctwDeployments[0].attachment_end).format(DefaultTimeFormat) - : null, - bctw_deployment_id: matchingBctwDeployments[0].deployment_id, - device_id: matchingBctwDeployments[0].device_id, - device_make: matchingBctwDeployments[0].device_make, - device_model: matchingBctwDeployments[0].device_model, - frequency: matchingBctwDeployments[0].frequency, - frequency_unit: matchingBctwDeployments[0].frequency_unit, - // SIMS properties - deployment_id: surveyDeployment.deployment_id, - critter_id: surveyDeployment.critter_id, - critterbase_critter_id: surveyDeployment.critterbase_critter_id, - critterbase_start_capture_id: surveyDeployment.critterbase_start_capture_id, - critterbase_end_capture_id: surveyDeployment.critterbase_end_capture_id, - critterbase_end_mortality_id: surveyDeployment.critterbase_end_mortality_id - }; - - return res.status(200).json({ deployment: surveyDeploymentWithBctwData, bad_deployment: null }); + return res.status(200).json({ deployment: deployment }); } catch (error) { defaultLog.error({ label: 'getDeploymentById', message: 'error', error }); await connection.rollback(); - throw error; } finally { connection.release(); @@ -270,8 +299,8 @@ export const PUT: Operation = [ ]; PUT.apiDoc = { - description: 'Updates information about the start and end of a deployment.', - tags: ['deployment', 'bctw'], + description: 'Update a deployment.', + tags: ['deployment'], security: [ { Bearer: [] @@ -299,7 +328,6 @@ PUT.apiDoc = { { in: 'path', name: 'deploymentId', - description: 
'SIMS deployment ID', schema: { type: 'integer', minimum: 1 @@ -308,23 +336,22 @@ PUT.apiDoc = { } ], requestBody: { - description: 'Specifies a deployment id and the new timerange to update it with.', + description: 'Deployment data to update.', required: true, content: { 'application/json': { schema: { - title: 'Deploy device request object', type: 'object', additionalProperties: false, required: [ 'critter_id', 'device_id', + 'frequency', + 'frequency_unit_id', + 'attachment_start_date', + 'attachment_start_time', 'attachment_end_date', 'attachment_end_time', - 'device_make', - 'device_model', - 'frequency', - 'frequency_unit', 'critterbase_start_capture_id', 'critterbase_end_capture_id', 'critterbase_end_mortality_id' @@ -334,51 +361,64 @@ PUT.apiDoc = { type: 'integer', minimum: 1 }, - attachment_end_date: { - type: 'string', - description: 'End date of the deployment, without time.', - nullable: true - }, - attachment_end_time: { - type: 'string', - description: 'End time of the deployment.', - nullable: true - }, device_id: { type: 'integer', minimum: 1 }, - device_make: { + frequency: { type: 'number', + description: + 'The frequency of the device. Property "frequency_unit_id" must also be provided if this is provided.', + nullable: true + }, + frequency_unit_id: { + type: 'integer', + description: + 'The ID of a frequency unit code. 
Property "frequency" must also be provided if this is provided.', + minimum: 1, nullable: true }, - device_model: { + attachment_start_date: { type: 'string', + description: 'Start date of the deployment (without time component).', + example: '2021-01-01' + }, + attachment_start_time: { + type: 'string', + description: 'Start time of the deployment.', + example: '12:00:00', nullable: true }, - frequency: { - type: 'number', + attachment_end_date: { + type: 'string', + description: 'End date of the deployment (without time component).', + example: '2021-01-01', nullable: true }, - frequency_unit: { - type: 'number', + attachment_end_time: { + type: 'string', + description: 'End time of the deployment.', + example: '12:00:00', nullable: true }, critterbase_start_capture_id: { type: 'string', - description: 'Critterbase capture record when the deployment started', + description: + 'Critterbase capture event. The capture event during which the device was attached to the animal.', format: 'uuid', nullable: true }, critterbase_end_capture_id: { type: 'string', - description: 'Critterbase capture record when the deployment ended', + description: + 'Critterbase capture event. The capture event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', format: 'uuid', nullable: true }, critterbase_end_mortality_id: { type: 'string', - description: 'Critterbase mortality record when the deployment ended', + description: + 'Critterbase mortality event. The mortality event during which the device was removed from the animal. Only one of critterbase_end_capture_id or critterbase_end_mortality_id can be provided.', format: 'uuid', nullable: true } @@ -389,7 +429,7 @@ PUT.apiDoc = { }, responses: { 200: { - description: 'Deployment updated OK.' + description: 'Deployment patched OK.' 
}, 400: { $ref: '#/components/responses/400' @@ -409,67 +449,48 @@ PUT.apiDoc = { } }; +/** + * Updates a deployment. + * + * @export + * @return {*} {RequestHandler} + */ export function updateDeployment(): RequestHandler { return async (req, res) => { + const surveyId = Number(req.params.surveyId); const deploymentId = Number(req.params.deploymentId); - const connection = getDBConnection(req.keycloak_token); + const critterId = req.body.critter_id; + const deviceId = req.body.device_id; + const frequency = numberOrNull(req.body.frequency); + const frequencyUnitId = numberOrNull(req.body.frequency_unit_id); + const attachmentStartDate = req.body.attachment_start_date; + const attachmentStartTime = req.body.attachment_start_time; + const attachmentEndDate = req.body.attachment_end_date; + const attachmentEndTime = req.body.attachment_end_time; + const critterbaseStartCaptureId = req.body.critterbase_start_capture_id; + const critterbaseEndCaptureId = req.body.critterbase_end_capture_id; + const critterbaseEndMortalityId = req.body.critterbase_end_mortality_id; - const { - critter_id, - attachment_end_date, - attachment_end_time, - // device_id, // Do not allow the device_id to be updated - device_make, - device_model, - frequency, - frequency_unit, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id - } = req.body; + const connection = getDBConnection(req.keycloak_token); try { await connection.open(); - // Update the deployment in SIMS - const deploymentService = new DeploymentService(connection); - const bctw_deployment_id = await deploymentService.updateDeployment({ - deployment_id: deploymentId, - critter_id: critter_id, - critterbase_start_capture_id, - critterbase_end_capture_id, - critterbase_end_mortality_id - }); - - // TODO: Decide whether to explicitly record attachment start date, or just reference the capture. Might remove this line. 
- const critterbaseService = new CritterbaseService(getCritterbaseUser(req)); - const capture = await critterbaseService.getCaptureById(critterbase_start_capture_id); - - // Create attachment end date from provided end date (if not null) and end time (if not null). - const attachmentEnd = attachment_end_date - ? attachment_end_time - ? dayjs(`${attachment_end_date} ${attachment_end_time}`).toISOString() - : dayjs(`${attachment_end_date}`).toISOString() - : null; - - // Update the deployment (collar_animal_assignment) in BCTW - const bctwDeploymentService = new BctwDeploymentService(getBctwUser(req)); - // Returns an array though we only expect one record - const bctwDeploymentRecords = await bctwDeploymentService.updateDeployment({ - deployment_id: bctw_deployment_id, - attachment_start: capture.capture_date, - attachment_end: attachmentEnd // TODO: ADD SEPARATE DATE AND TIME TO BCTW - }); + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - // Update the collar details in BCTW - const bctwDeviceService = new BctwDeviceService(getBctwUser(req)); - await bctwDeviceService.updateCollar({ - collar_id: bctwDeploymentRecords[0].collar_id, - device_make: device_make, - device_model: device_model, + await telemetryDeploymentService.updateDeployment(surveyId, deploymentId, { + critter_id: critterId, + device_id: deviceId, frequency: frequency, - frequency_unit: frequency_unit + frequency_unit_id: frequencyUnitId, + attachment_start_date: attachmentStartDate, + attachment_start_time: attachmentStartTime, + attachment_end_date: attachmentEndDate, + attachment_end_time: attachmentEndTime, + critterbase_start_capture_id: critterbaseStartCaptureId, + critterbase_end_capture_id: critterbaseEndCaptureId, + critterbase_end_mortality_id: critterbaseEndMortalityId }); await connection.commit(); @@ -505,8 +526,8 @@ export const DELETE: Operation = [ ]; DELETE.apiDoc = { - description: 'Deletes the deployment record in SIMS, and soft deletes the record in 
BCTW.', - tags: ['deploymenty', 'bctw'], + description: 'Deletes a deployment.', + tags: ['deployment'], security: [ { Bearer: [] @@ -534,7 +555,6 @@ DELETE.apiDoc = { { in: 'path', name: 'deploymentId', - description: 'SIMS deployment ID', schema: { type: 'integer', minimum: 1 @@ -564,34 +584,33 @@ DELETE.apiDoc = { } }; +/** + * Deletes a deployment. + * + * @export + * @return {*} {RequestHandler} + */ export function deleteDeployment(): RequestHandler { return async (req, res) => { - const deploymentId = Number(req.params.deploymentId); const surveyId = Number(req.params.surveyId); + const deploymentId = Number(req.params.deploymentId); const connection = getDBConnection(req.keycloak_token); try { await connection.open(); - const user: ICritterbaseUser = { - keycloak_guid: connection.systemUserGUID(), - username: connection.systemUserIdentifier() - }; - - const deploymentService = new DeploymentService(connection); - const { bctw_deployment_id } = await deploymentService.deleteDeployment(surveyId, deploymentId); + const telemetryDeploymentService = new TelemetryDeploymentService(connection); - const bctwDeploymentService = new BctwDeploymentService(user); - await bctwDeploymentService.deleteDeployment(bctw_deployment_id); + await telemetryDeploymentService.deleteDeployment(surveyId, deploymentId); await connection.commit(); + return res.status(200).send(); } catch (error) { defaultLog.error({ label: 'deleteDeployment', message: 'error', error }); await connection.rollback(); - - return res.status(500).json((error as AxiosError).response); + throw error; } finally { connection.release(); } diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.test.ts new file mode 100644 index 0000000000..7d99d56fab --- /dev/null +++ 
b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.test.ts @@ -0,0 +1,54 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { getTelemetryForDeployment } from '.'; +import * as db from '../../../../../../../../database/db'; +import { TelemetryVendorService } from '../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../__mocks__/db'; + +describe('getTelemetryForDeployment', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getTelemetryForDeployment', () => { + it('should return telemetry data for a single deployment', async () => { + const mockDBConnection = getMockDBConnection({ + commit: sinon.stub(), + release: sinon.stub(), + open: sinon.stub(), + rollback: sinon.stub() + }); + + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const serviceStub = sinon.stub(TelemetryVendorService.prototype, 'getTelemetryForDeployment').resolves([ + { + telemetry_id: 1 + } + ] as any); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deploymentId: '3' + }; + + const requestHandler = getTelemetryForDeployment(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockDBConnection.open).to.have.been.calledOnce; + + expect(serviceStub).to.have.been.calledOnceWithExactly(2, 3); + expect(mockRes.json).to.have.been.calledOnceWith({ telemetry: [{ telemetry_id: 1 }] }); + expect(mockRes.status).calledOnceWithExactly(200); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + + expect(mockDBConnection.rollback).to.not.have.been.called; + }); + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.ts 
b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.ts new file mode 100644 index 0000000000..cf9d4b1e7a --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index.ts @@ -0,0 +1,146 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../../database/db'; +import { TelemetrySchema } from '../../../../../../../../openapi/schemas/telemetry'; +import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/deployments/{deploymentId}/telemetry/index'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getTelemetryForDeployment() +]; + +GET.apiDoc = { + description: 'Get all telemetry for a deployment.', + tags: ['deployment', 'telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'deploymentId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + 
responses: { + 200: { + description: 'Responds with all telemetry for a deployment. Includes both manual and vendor telemetry.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + telemetry: { + type: 'array', + items: { + properties: { + schema: TelemetrySchema + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Gets all telemetry for a deployment. + * + * @export + * @return {*} {RequestHandler} + */ +export function getTelemetryForDeployment(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deploymentId = Number(req.params.deploymentId); + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + const telemetry = await telemetryVendorService.getTelemetryForDeployment(surveyId, deploymentId); + + await connection.commit(); + + return res.status(200).json({ telemetry: telemetry }); + } catch (error) { + defaultLog.error({ label: 'getTelemetryForDeployment', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.test.ts new file mode 100644 index 0000000000..9473fa1931 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.test.ts @@ -0,0 +1,72 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db 
from '../../../../../../database/db'; +import { TelemetryDeviceService } from '../../../../../../services/telemetry-services/telemetry-device-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../.././../../__mocks__/db'; +import { deleteDevices } from './delete'; + +chai.use(sinonChai); + +describe('deleteDevices', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should delete all provided device records', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + mockReq.body = { + device_ids: [3, 4] + }; + + sinon.stub(TelemetryDeviceService.prototype, 'deleteDevices').resolves(); + + const requestHandler = deleteDevices(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.statusValue).to.equal(200); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('should catch and re-throw an error', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + mockReq.body = { + device_ids: [3, 4] + }; + + const mockError = new Error('test error'); + + sinon.stub(TelemetryDeviceService.prototype, 'deleteDevices').rejects(mockError); + + const requestHandler = deleteDevices(); + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.eql(mockError); + + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + } + 
}); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.ts new file mode 100644 index 0000000000..64754780b0 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/delete.ts @@ -0,0 +1,137 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../database/db'; +import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { TelemetryDeviceService } from '../../../../../../services/telemetry-services/telemetry-device-service'; +import { getLogger } from '../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/devices2/delete'); + +export const POST: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + deleteDevices() +]; + +POST.apiDoc = { + description: 'Delete devices.', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + description: 'Array of one or more device IDs to delete.', + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + required: ['device_ids'], + additionalProperties: false, + properties: { + device_ids: { + type: 'array', + items: { + type: 'integer', + 
minimum: 1 + }, + minItems: 1 + } + } + } + } + } + }, + responses: { + 200: { + description: 'Deletes OK.' + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Deletes devices. + * + * @export + * @return {*} {RequestHandler} + */ +export function deleteDevices(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deviceIds: number[] = req.body.device_ids; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const deviceService = new TelemetryDeviceService(connection); + + await deviceService.deleteDevices(surveyId, deviceIds); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'deleteDevices', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.test.ts new file mode 100644 index 0000000000..b29512c41b --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.test.ts @@ -0,0 +1,224 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { createDevice, getDevicesInSurvey } from '.'; +import * as db from '../../../../../../database/db'; +import { ApiError, ApiGeneralError } from '../../../../../../errors/api-error'; +import { TelemetryDeviceService } from '../../../../../../services/telemetry-services/telemetry-device-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../__mocks__/db'; + +describe('getDevicesInSurvey', () => 
{ + afterEach(() => { + sinon.restore(); + }); + + it('gets devices in survey', async () => { + const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockDevices = [ + { + device_id: 1, + survey_id: 66, + device_key: 'key', + serial: '123456', + device_make_id: 1, + model: 'ModelX', + comment: 'Comment' + } + ]; + + sinon.stub(TelemetryDeviceService.prototype, 'getDevicesForSurvey').resolves(mockDevices); + sinon.stub(TelemetryDeviceService.prototype, 'getDevicesCount').resolves(1); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + const requestHandler = getDevicesInSurvey(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.json).to.have.been.calledOnceWith({ + devices: mockDevices, + count: 1, + pagination: { + total: 1, + per_page: 1, + current_page: 1, + last_page: 1, + sort: undefined, + order: undefined + } + }); + expect(mockRes.status).calledOnceWith(200); + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('Test error'); + + sinon.stub(TelemetryDeviceService.prototype, 'getDevicesForSurvey').rejects(mockError); + sinon.stub(TelemetryDeviceService.prototype, 'getDevicesCount').resolves(1); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + const requestHandler = getDevicesInSurvey(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); + + describe('createDevice', () => { + 
afterEach(() => { + sinon.restore(); + }); + + it('successfully creates a device', async () => { + const mockDBConnection = getMockDBConnection({ + release: sinon.stub(), + rollback: sinon.stub(), + commit: sinon.stub() + }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSurveyId = '2'; + const mockSerial = '123456'; + const mockDeviceMakeId = 1; + const mockModel = 'ModelX'; + const mockComment = 'Device Comment'; + + const mockDevice = null; // Device doesn't exist, so no error should be thrown + + sinon.stub(TelemetryDeviceService.prototype, 'findDeviceBySerial').resolves(mockDevice); + sinon.stub(TelemetryDeviceService.prototype, 'createDevice').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + surveyId: mockSurveyId + }; + + mockReq.body = { + serial: mockSerial, + device_make_id: mockDeviceMakeId, + model: mockModel, + comment: mockComment + }; + + const requestHandler = createDevice(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.status).to.have.been.calledOnceWith(200); + expect(mockRes.send).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('throws error when device already exists', async () => { + const mockDBConnection = getMockDBConnection({ + release: sinon.stub(), + rollback: sinon.stub() + }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockSurveyId = '2'; + const mockSerial = '123456'; + const mockDeviceMakeId = 1; + const mockModel = 'ModelX'; + const mockComment = 'Device Comment'; + + const existingDevice = { + serial: mockSerial, + device_make_id: mockDeviceMakeId, + device_id: 1, + survey_id: 1, + device_key: '1:lotek', + model: null, + comment: 'comment' + }; + + sinon.stub(TelemetryDeviceService.prototype, 'findDeviceBySerial').resolves(existingDevice); + 
sinon.stub(TelemetryDeviceService.prototype, 'createDevice').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + surveyId: mockSurveyId + }; + + mockReq.body = { + serial: mockSerial, + device_make_id: mockDeviceMakeId, + model: mockModel, + comment: mockComment + }; + + const requestHandler = createDevice(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail('Error should have been thrown'); + } catch (error) { + // Assertions + expect(error).to.be.an.instanceOf(ApiGeneralError); + expect((error as ApiError).message).to.equal( + `Device ${mockSerial} of the given make already exists in the Survey.` + ); + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); + + it('handles errors and rolls back transaction', async () => { + // Mock DB connection + const mockDBConnection = getMockDBConnection({ + release: sinon.stub(), + rollback: sinon.stub() + }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('Database error'); + sinon.stub(TelemetryDeviceService.prototype, 'findDeviceBySerial').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + surveyId: '2' + }; + + mockReq.body = { + serial: '123456', + device_make_id: 1, + model: 'ModelX', + comment: 'Device Comment' + }; + + const requestHandler = createDevice(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail('Error should have been thrown'); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.ts new file mode 100644 index 0000000000..3492287884 --- /dev/null +++ 
b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/index.ts @@ -0,0 +1,331 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../database/db'; +import { ApiGeneralError } from '../../../../../../errors/api-error'; +import { + paginationRequestQueryParamSchema, + paginationResponseSchema +} from '../../../../../../openapi/schemas/pagination'; +import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { TelemetryDeviceService } from '../../../../../../services/telemetry-services/telemetry-device-service'; +import { getLogger } from '../../../../../../utils/logger'; +import { + ensureCompletePaginationOptions, + makePaginationOptionsFromRequest, + makePaginationResponse +} from '../../../../../../utils/pagination'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/technique/index'); + +export const POST: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + createDevice() +]; + +POST.apiDoc = { + description: 'Create a telemetry device.', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + required: ['serial', 'device_make_id'], + additionalProperties: false, + 
properties: { + serial: { + type: 'string' + }, + device_make_id: { + type: 'integer', + minimum: 1 + }, + model: { + type: 'string', + maxLength: 100, + nullable: true + }, + comment: { + type: 'string', + maxLength: 250, + nullable: true + } + } + } + } + } + }, + responses: { + 201: { + description: 'Telemetry device created OK.' + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Create a telemetry device. + * + * @returns + */ +export function createDevice(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const serial = req.body.serial; + const device_make_id = Number(req.body.device_make_id); + const model = req.body.model; + const comment = req.body.comment; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryDeviceService = new TelemetryDeviceService(connection); + + // Check whether device already exists in Survey + const device = await telemetryDeviceService.findDeviceBySerial(surveyId, serial, device_make_id); + + // Throw error if device already exists + if (device) { + throw new ApiGeneralError(`Device ${serial} of the given make already exists in the Survey.`); + } + + await telemetryDeviceService.createDevice({ + survey_id: surveyId, + serial: serial, + device_make_id: device_make_id, + model: model, + comment: comment + }); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'createDevice', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + 
PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getDevicesInSurvey() +]; + +GET.apiDoc = { + description: 'Get telemetry devices for a survey.', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + ...paginationRequestQueryParamSchema + ], + responses: { + 200: { + description: 'List of telemetry devices.', + content: { + 'application/json': { + schema: { + type: 'object', + required: ['devices', 'count'], + additionalProperties: false, + properties: { + devices: { + type: 'array', + items: { + type: 'object', + required: ['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment'], + additionalProperties: false, + properties: { + device_id: { + type: 'number' + }, + survey_id: { + type: 'number' + }, + device_key: { + type: 'string' + }, + serial: { + type: 'string' + }, + device_make_id: { + type: 'number' + }, + model: { + type: 'string', + nullable: true + }, + comment: { + type: 'string', + nullable: true + } + } + } + }, + count: { + type: 'number', + description: 'Count of telemetry devices in the respective survey.' + }, + pagination: { ...paginationResponseSchema } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Get all telemetry devices for a survey. 
+ * + * @returns {RequestHandler} + */ +export function getDevicesInSurvey(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + + const connection = getDBConnection(req.keycloak_token); + + try { + const paginationOptions = makePaginationOptionsFromRequest(req); + + await connection.open(); + + const telemetryDeviceService = new TelemetryDeviceService(connection); + + const [devices, devicesCount] = await Promise.all([ + telemetryDeviceService.getDevicesForSurvey(surveyId, ensureCompletePaginationOptions(paginationOptions)), + telemetryDeviceService.getDevicesCount(surveyId) + ]); + + await connection.commit(); + + return res.status(200).json({ + devices: devices, + count: devicesCount, + pagination: makePaginationResponse(devicesCount, paginationOptions) + }); + } catch (error) { + defaultLog.error({ label: 'getDevices', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.test.ts new file mode 100644 index 0000000000..37339e080e --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.test.ts @@ -0,0 +1,190 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { deleteDevice, getDevice, updateDevice } from '.'; +import * as db from '../../../../../../../database/db'; +import { TelemetryDeviceService } from '../../../../../../../services/telemetry-services/telemetry-device-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; + +describe('getDevice', () => { + afterEach(() => { + sinon.restore(); + }); + + it('Gets an existing device', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 
'getDBConnection').returns(mockDBConnection); + + const mockDevice = { + device_id: 1, + survey_id: 66, + device_key: 'key', + serial: '123456', + device_make_id: 1, + model: 'ModelX', + comment: 'Comment' + }; + + sinon.stub(TelemetryDeviceService.prototype, 'getDevice').resolves(mockDevice); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + + const requestHandler = getDevice(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.json).to.have.been.calledWith({ device: mockDevice }); + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('Test error'); + + sinon.stub(TelemetryDeviceService.prototype, 'getDevice').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + + const requestHandler = getDevice(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); +}); + +describe('updateDevice', () => { + afterEach(() => { + sinon.restore(); + }); + + it('updates an existing device', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + sinon.stub(TelemetryDeviceService.prototype, 'updateDevice').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + 
mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + mockReq.body = { + serial: '123456', + device_make_id: 1, + model: 'ModelX', + comment: null + }; + + const requestHandler = updateDevice(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('a test error'); + + sinon.stub(TelemetryDeviceService.prototype, 'updateDevice').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + + const requestHandler = updateDevice(); + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); +}); + +describe('deleteDevice', () => { + afterEach(() => { + sinon.restore(); + }); + + it('deletes an existing device', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + sinon.stub(TelemetryDeviceService.prototype, 'deleteDevice').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + + const requestHandler = deleteDevice(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.status).to.have.been.calledWith(200); + expect(mockDBConnection.commit).to.have.been.calledOnce; + 
expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('a test error'); + + sinon.stub(TelemetryDeviceService.prototype, 'deleteDevice').rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2', + deviceId: '3' + }; + + const requestHandler = deleteDevice(); + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(actualError).to.equal(mockError); + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.ts new file mode 100644 index 0000000000..d814586006 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index.ts @@ -0,0 +1,424 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../database/db'; +import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { TelemetryDeviceService } from '../../../../../../../services/telemetry-services/telemetry-device-service'; +import { getLogger } from '../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/devices/{deviceId}/index'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + 
PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getDevice() +]; + +GET.apiDoc = { + description: 'Get a device.', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'deviceId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + responses: { + 200: { + description: 'Device response object.', + content: { + 'application/json': { + schema: { + type: 'object', + required: ['device'], + additionalProperties: false, + properties: { + device: { + type: 'object', + required: ['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment'], + additionalProperties: false, + properties: { + device_id: { + type: 'integer' + }, + survey_id: { + type: 'integer', + minimum: 1 + }, + device_key: { + type: 'string' + }, + serial: { + type: 'string' + }, + device_make_id: { + type: 'integer', + minimum: 1 + }, + model: { + type: 'string', + maxLength: 100, + nullable: true + }, + comment: { + type: 'string', + maxLength: 250, + nullable: true + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Get a device. 
+ * + * @returns {RequestHandler} + */ +export function getDevice(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deviceId = Number(req.params.deviceId); + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryDeviceService = new TelemetryDeviceService(connection); + const device = await telemetryDeviceService.getDevice(surveyId, deviceId); + + await connection.commit(); + + return res.status(200).json({ device: device }); + } catch (error) { + defaultLog.error({ label: 'getDevice', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} + +export const PUT: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + updateDevice() +]; + +PUT.apiDoc = { + description: 'Update a device', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'deviceId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + required: ['serial', 'device_make_id'], + additionalProperties: false, + properties: { + serial: { + type: 'string' + }, + device_make_id: { + type: 'integer', + minimum: 1 + }, + model: { + type: 'string', + maxLength: 100, + nullable: true + }, + comment: { + type: 'string', + maxLength: 250, + 
nullable: true + } + } + } + } + } + }, + responses: { + 200: { + description: 'Device updated OK.' + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Update a device. + * + * @export + * @return {*} {RequestHandler} + */ +export function updateDevice(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deviceId = Number(req.params.deviceId); + + const serial = req.body.serial; + const deviceMakeId = Number(req.body.device_make_id); + const model = req.body.model; + const comment = req.body.comment; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryDeviceService = new TelemetryDeviceService(connection); + + await telemetryDeviceService.updateDevice(surveyId, deviceId, { + serial: serial, + device_make_id: deviceMakeId, + model: model, + comment: comment + }); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'updateDevice', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} + +export const DELETE: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + deleteDevice() +]; + +DELETE.apiDoc = { + description: 'Deletes the device.', + tags: ['device'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 
+ }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'deviceId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + responses: { + 200: { + description: 'Delete device OK.' + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +export function deleteDevice(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const deviceId = Number(req.params.deviceId); + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryDeviceService = new TelemetryDeviceService(connection); + + await telemetryDeviceService.deleteDevice(surveyId, deviceId); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'deleteDevice', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/technique/{techniqueId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/technique/{techniqueId}/index.ts index 209ac8613f..9344688a4e 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/technique/{techniqueId}/index.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/technique/{techniqueId}/index.ts @@ -188,7 +188,6 @@ PUT.apiDoc = { type: 'integer', minimum: 1 }, - description: 'An array of method technique IDs', required: true } ], @@ -341,7 +340,6 @@ GET.apiDoc = { type: 'integer', minimum: 1 }, - description: 'An array of method technique IDs', required: true } ], diff --git 
a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/import.ts similarity index 64% rename from api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/upload.ts rename to api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/import.ts index ed7d41ac4a..24fad45642 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/upload.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/import.ts @@ -4,9 +4,10 @@ import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/rol import { getDBConnection } from '../../../../../../database/db'; import { csvFileSchema } from '../../../../../../openapi/schemas/file'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; -import { TelemetryService } from '../../../../../../services/telemetry-service'; -import { uploadFileToS3 } from '../../../../../../utils/file-utils'; +import { importCSV } from '../../../../../../services/import-services/import-csv'; +import { ImportTelemetryStrategy } from '../../../../../../services/import-services/telemetry/import-telemetry-strategy'; import { getLogger } from '../../../../../../utils/logger'; +import { parseMulterFile } from '../../../../../../utils/media/media-utils'; import { getFileFromRequest } from '../../../../../../utils/request'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/telemetry/upload'); @@ -27,7 +28,7 @@ export const POST: Operation = [ ] }; }), - uploadMedia() + importTelemetryCSV() ]; POST.apiDoc = { @@ -58,6 +59,7 @@ POST.apiDoc = { schema: { type: 'object', additionalProperties: false, + required: ['media'], properties: { media: { description: 'A survey telemetry submission file.', @@ -73,20 +75,7 @@ POST.apiDoc = { }, responses: { 200: { - description: 'Upload OK', - content: { - 'application/json': { - schema: { - type: 'object', - 
additionalProperties: false, - properties: { - submission_id: { - type: 'number' - } - } - } - } - } + description: 'Import OK' }, 400: { $ref: '#/components/responses/400' @@ -107,12 +96,13 @@ POST.apiDoc = { }; /** - * Uploads a media file to S3 and inserts a matching record in the `survey_telemetry_submission` table. + * Imports manual telemetry from a CSV file. * * @return {*} {RequestHandler} */ -export function uploadMedia(): RequestHandler { +export function importTelemetryCSV(): RequestHandler { return async (req, res) => { + const surveyId = Number(req.params.surveyId); const rawMediaFile = getFileFromRequest(req); const connection = getDBConnection(req.keycloak_token); @@ -120,31 +110,18 @@ export function uploadMedia(): RequestHandler { try { await connection.open(); - // Insert a new record in the `survey_telemetry_submission` table - const service = new TelemetryService(connection); - const { submission_id: submissionId, key } = await service.insertSurveyTelemetrySubmission( - rawMediaFile, - Number(req.params.projectId), - Number(req.params.surveyId) - ); + const telemetryStrategy = new ImportTelemetryStrategy(connection, surveyId); - // Upload file to S3 - const metadata = { - filename: rawMediaFile.originalname, - username: req.keycloak_token?.preferred_username ?? '', - email: req.keycloak_token?.email ?? 
'' - }; - - const result = await uploadFileToS3(rawMediaFile, key, metadata); - - defaultLog.debug({ label: 'uploadMedia', message: 'result', result }); + // Pass CSV file and importer as dependencies + await importCSV(parseMulterFile(rawMediaFile), telemetryStrategy); await connection.commit(); - return res.status(200).json({ submission_id: submissionId }); + return res.status(200).send(); } catch (error) { - defaultLog.error({ label: 'uploadMedia', message: 'error', error }); + defaultLog.error({ label: 'importTelemetry', message: 'error', error }); await connection.rollback(); + throw error; } finally { connection.release(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.test.ts new file mode 100644 index 0000000000..fcf4c37e47 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.test.ts @@ -0,0 +1,102 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { getTelemetryInSurvey } from '.'; +import * as db from '../../../../../../database/db'; +import { + Telemetry, + TelemetryVendorEnum +} from '../../../../../../repositories/telemetry-repositories/telemetry-vendor-repository.interface'; + +import { TelemetryVendorService } from '../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../__mocks__/db'; + +describe('getTelemetryInSurvey', () => { + afterEach(() => { + sinon.restore(); + }); + + it('gets deployments in survey', async () => { + const mockDBConnection = getMockDBConnection({ commit: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockTelemetry: Telemetry[] = [ + { + telemetry_id: '123-456-789', + deployment_id: 2, + critter_id: 3, + vendor: TelemetryVendorEnum.VECTRONIC, + serial: '123456', + acquisition_date: 
'2021-01-01T00:00:00.000Z', + latitude: -49, + longitude: 125, + elevation: null, + temperature: null + } + ]; + + const mockCount = 1; + + const mockResponse: [Telemetry[], number] = [mockTelemetry, mockCount]; + + sinon.stub(TelemetryVendorService.prototype, 'getTelemetryForSurvey').resolves(mockResponse); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + const requestHandler = getTelemetryInSurvey(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(mockRes.json).to.have.been.calledOnceWith({ + telemetry: mockTelemetry, + count: mockCount, + pagination: { + total: 1, + per_page: 1, + current_page: 1, + last_page: 1, + sort: undefined, + order: undefined + } + }); + expect(mockRes.status).calledOnceWith(200); + + expect(mockDBConnection.commit).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + }); + + it('catches and re-throws errors', async () => { + const mockDBConnection = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(mockDBConnection); + + const mockError = new Error('Test error'); + + const getTelemetryForSurveyStub = sinon + .stub(TelemetryVendorService.prototype, 'getTelemetryForSurvey') + .rejects(mockError); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.params = { + projectId: '1', + surveyId: '2' + }; + + const requestHandler = getTelemetryInSurvey(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(getTelemetryForSurveyStub).calledOnce; + expect(actualError).to.equal(mockError); + + expect(mockDBConnection.rollback).to.have.been.calledOnce; + expect(mockDBConnection.release).to.have.been.calledOnce; + } + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.ts 
b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.ts new file mode 100644 index 0000000000..79e34efbf8 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/index.ts @@ -0,0 +1,208 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../database/db'; +import { + paginationRequestQueryParamSchema, + paginationResponseSchema +} from '../../../../../../openapi/schemas/pagination'; +import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../utils/logger'; +import { + ensureCompletePaginationOptions, + makePaginationOptionsFromRequest, + makePaginationResponse +} from '../../../../../../utils/pagination'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/telemetry/index'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getTelemetryInSurvey() +]; + +GET.apiDoc = { + description: 'Gets all telemetry records in a survey.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + ...paginationRequestQueryParamSchema + ], + 
responses: { + 200: { + description: 'Responds with information about all telemetry records under this survey.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + telemetry: { + title: 'Telemetry Records', + type: 'array', + items: { + title: 'Telemetry Record', + type: 'object', + additionalProperties: false, + required: [ + 'telemetry_id', + 'deployment_id', + 'critter_id', + 'vendor', + 'serial', + 'acquisition_date', + 'latitude', + 'longitude', + 'elevation', + 'temperature' + ], + properties: { + telemetry_id: { + type: 'string' + }, + deployment_id: { + type: 'number' + }, + critter_id: { + type: 'number' + }, + vendor: { + type: 'string', + enum: ['vectronic', 'lotek', 'ats', 'manual'] + }, + serial: { + type: 'string' + }, + acquisition_date: { + type: 'string' + }, + latitude: { + type: 'number', + nullable: true + }, + longitude: { + type: 'number', + nullable: true + }, + elevation: { + type: 'number', + nullable: true + }, + temperature: { + type: 'number', + nullable: true + } + } + } + }, + count: { + type: 'number', + description: 'Count of telemetry records in the respective survey.' + }, + pagination: { ...paginationResponseSchema } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Gets all telemetry records in a survey. 
+ * + * @export + * @return {*} {RequestHandler} + */ +export function getTelemetryInSurvey(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + + const connection = getDBConnection(req.keycloak_token); + + try { + const paginationOptions = makePaginationOptionsFromRequest(req); + + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + const [telemetry, telemetryCount] = await telemetryVendorService.getTelemetryForSurvey(surveyId, { + pagination: ensureCompletePaginationOptions(paginationOptions) + }); + + await connection.commit(); + + return res.status(200).json({ + telemetry: telemetry, + count: telemetryCount, + pagination: makePaginationResponse(telemetryCount, paginationOptions) + }); + } catch (error) { + defaultLog.error({ label: 'getTelemetryInSurvey', message: 'error', error }); + await connection.rollback(); + + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/spatial.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/spatial.ts new file mode 100644 index 0000000000..ab7876ef9b --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/spatial.ts @@ -0,0 +1,162 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../database/db'; +import { GeoJSONPoint } from '../../../../../../openapi/schemas/geoJson'; +import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../utils/logger'; + +const defaultLog = 
getLogger('/api/project/{projectId}/survey/{surveyId}/observation'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getTelemetrySpatialData() +]; + +GET.apiDoc = { + description: 'Get all telemetry spatial data for a survey.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'number', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'number', + minimum: 1 + }, + required: true + } + ], + responses: { + 200: { + description: 'Survey telemetry spatial data.', + content: { + 'application/json': { + schema: { + type: 'object', + required: ['telemetry', 'supplementaryData'], + additionalProperties: false, + properties: { + telemetry: { + type: 'array', + items: { + type: 'object', + required: ['telemetry_id', 'geometry'], + additionalProperties: false, + properties: { + telemetry_id: { + type: 'string', + format: 'uuid' + }, + geometry: { + ...GeoJSONPoint, + nullable: true + } + } + } + }, + supplementaryData: { + type: 'object', + required: ['count'], + additionalProperties: false, + properties: { + count: { + type: 'integer', + minimum: 0 + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Fetch all telemetry spatial data for a survey. 
+ * + * @export + * @return {*} {RequestHandler} + */ +export function getTelemetrySpatialData(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + + defaultLog.debug({ label: 'getTelemetrySpatialData', surveyId }); + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + const [telemetry, telemetryCount] = await telemetryVendorService.getTelemetrySpatialForSurvey(surveyId); + + await connection.commit(); + + return res.status(200).json({ + telemetry: telemetry, + supplementaryData: { + count: telemetryCount + } + }); + } catch (error) { + defaultLog.error({ label: 'getTelemetrySpatialData', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/{telemetryId}/index.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/{telemetryId}/index.ts new file mode 100644 index 0000000000..efdf2cf7d9 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/telemetry/{telemetryId}/index.ts @@ -0,0 +1,190 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../database/db'; +import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { TelemetryVendorService } from '../../../../../../../services/telemetry-services/telemetry-vendor-service'; +import { getLogger } from '../../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/telemetry/{telemetryId}/index'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + or: [ + { + 
validProjectPermissions: [ + PROJECT_PERMISSION.COORDINATOR, + PROJECT_PERMISSION.COLLABORATOR, + PROJECT_PERMISSION.OBSERVER + ], + surveyId: Number(req.params.surveyId), + discriminator: 'ProjectPermission' + }, + { + validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' + } + ] + }; + }), + getTelemetryRecordInSurvey() +]; + +GET.apiDoc = { + description: 'Gets a telemetry record.', + tags: ['telemetry'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'telemetryId', + schema: { + type: 'string', + format: 'uuid' + }, + required: true + } + ], + responses: { + 200: { + description: 'Responds with information about a telemetry record under this survey.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + telemetry: { + title: 'Telemetry Record', + type: 'object', + additionalProperties: false, + required: [ + 'telemetry_id', + 'deployment_id', + 'critter_id', + 'vendor', + 'serial', + 'acquisition_date', + 'latitude', + 'longitude', + 'elevation', + 'temperature' + ], + properties: { + telemetry_id: { + type: 'string' + }, + deployment_id: { + type: 'number' + }, + critter_id: { + type: 'number' + }, + vendor: { + type: 'string', + enum: ['vectronic', 'lotek', 'ats', 'manual'] + }, + serial: { + type: 'string' + }, + acquisition_date: { + type: 'string' + }, + latitude: { + type: 'number', + nullable: true + }, + longitude: { + type: 'number', + nullable: true + }, + elevation: { + type: 'number', + nullable: true + }, + temperature: { + type: 'number', + nullable: true + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 
409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Gets a telemetry record in a survey. + * + * @export + * @return {*} {RequestHandler} + */ +export function getTelemetryRecordInSurvey(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + const telemetryId = req.params.telemetryId; + + const connection = getDBConnection(req.keycloak_token); + + try { + await connection.open(); + + const telemetryVendorService = new TelemetryVendorService(connection); + + const telemetry = await telemetryVendorService.getTelemetryRecordById(surveyId, telemetryId); + + await connection.commit(); + + return res.status(200).json({ telemetry: telemetry }); + } catch (error) { + defaultLog.error({ label: 'getTelemetryRecordInSurvey', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/telemetry/code.test.ts b/api/src/paths/telemetry/code.test.ts deleted file mode 100644 index e510c3b65f..0000000000 --- a/api/src/paths/telemetry/code.test.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../repositories/user-repository'; -import { BctwService } from '../../services/bctw-service/bctw-service'; -import { getRequestHandlerMocks } from '../../__mocks__/db'; -import { getCodeValues } from './code'; - -describe('getCodeValues', () => { - afterEach(() => { - sinon.restore(); - }); - - it('returns a list of Bctw code objects', async () => { - const mockCodeValues = [ - { - code_header_title: 'title', - code_header_name: 'name', - id: 123, - code: 'code', - description: 'description', - long_description: 'long_description' - } - ]; - const mockGetCode = sinon.stub(BctwService.prototype, 'getCode').resolves(mockCodeValues); - - const { mockReq, mockRes, 
mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getCodeValues(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockCodeValues); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetCode).to.have.been.calledOnce; - }); - - it('catches and re-throws errors', async () => { - const mockError = new Error('mock error'); - const mockGetCode = sinon.stub(BctwService.prototype, 'getCode').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getCodeValues(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(actualError).to.equal(mockError); - expect(mockGetCode).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/code.ts b/api/src/paths/telemetry/code.ts deleted file mode 100644 index a2537809a7..0000000000 --- a/api/src/paths/telemetry/code.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { BctwService, getBctwUser } from '../../services/bctw-service/bctw-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/code'); - -export const GET: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getCodeValues() -]; - -GET.apiDoc = { - description: 'Get a list of "code" values from the exterior telemetry system.', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: { - 200: { - description: 'Generic telemetry code response.', - content: { - 'application/json': { - schema: { - 
type: 'array', - items: { - type: 'object', - additionalProperties: false, - properties: { - id: { type: 'number' }, - code: { type: 'string' }, - code_header_title: { type: 'string' }, - code_header_name: { type: 'string' }, - description: { type: 'string' }, - long_description: { type: 'string' } - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getCodeValues(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwService = new BctwService(user); - - const codeHeader = String(req.query.codeHeader); - - try { - const result = await bctwService.getCode(codeHeader); - - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getCodeValues', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/deployments.test.ts b/api/src/paths/telemetry/deployments.test.ts deleted file mode 100644 index 471f6a2f23..0000000000 --- a/api/src/paths/telemetry/deployments.test.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../repositories/user-repository'; -import { BctwTelemetryService, IAllTelemetry } from '../../services/bctw-service/bctw-telemetry-service'; -import { getRequestHandlerMocks } from '../../__mocks__/db'; -import { getAllTelemetryByDeploymentIds } from './deployments'; - -const mockTelemetry: IAllTelemetry[] = [ - { - id: '123-123-123', - telemetry_id: null, - telemetry_manual_id: '123-123-123', - deployment_id: '345-345-345', - latitude: 49.123, - longitude: -126.123, - acquisition_date: '2021-01-01', - telemetry_type: 'manual' - }, - { - id: '567-567-567', - telemetry_id: '567-567-567', - telemetry_manual_id: null, - 
deployment_id: '345-345-345', - latitude: 49.123, - longitude: -126.123, - acquisition_date: '2021-01-01', - telemetry_type: 'vendor' - } -]; - -describe('getAllTelemetryByDeploymentIds', () => { - afterEach(() => { - sinon.restore(); - }); - it('should retrieve both manual and vendor telemetry', async () => { - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getAllTelemetryByDeploymentIds') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getAllTelemetryByDeploymentIds(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetTelemetry).to.have.been.calledOnce; - }); - it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getAllTelemetryByDeploymentIds') - .rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getAllTelemetryByDeploymentIds(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/deployments.ts b/api/src/paths/telemetry/deployments.ts deleted file mode 100644 index 035276ad86..0000000000 --- a/api/src/paths/telemetry/deployments.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { AllTelemetrySchema } from '../../openapi/schemas/telemetry'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { getBctwUser } from 
'../../services/bctw-service/bctw-service'; -import { BctwTelemetryService } from '../../services/bctw-service/bctw-telemetry-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/deployments'); - -export const GET: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getAllTelemetryByDeploymentIds() -]; - -GET.apiDoc = { - description: 'Get manual and vendor telemetry for a set of deployment Ids', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'query', - name: 'bctwDeploymentIds', - schema: { - type: 'array', - items: { type: 'string', format: 'uuid', minimum: 1 } - }, - required: true - } - ], - responses: { - 200: { - description: 'Manual and Vendor telemetry response object', - content: { - 'application/json': { - schema: { - type: 'array', - items: AllTelemetrySchema - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getAllTelemetryByDeploymentIds(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwTelemetryService = new BctwTelemetryService(user); - - try { - const bctwDeploymentIds = req.query.bctwDeploymentIds as string[]; - - const result = await bctwTelemetryService.getAllTelemetryByDeploymentIds(bctwDeploymentIds); - - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getAllTelemetryByDeploymentIds', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/device/index.test.ts b/api/src/paths/telemetry/device/index.test.ts deleted file mode 100644 index db121c53aa..0000000000 --- a/api/src/paths/telemetry/device/index.test.ts 
+++ /dev/null @@ -1,55 +0,0 @@ -import Ajv from 'ajv'; -import { expect } from 'chai'; -import sinon from 'sinon'; -import { HTTPError } from '../../../errors/http-error'; -import { SystemUser } from '../../../repositories/user-repository'; -import { BctwDeviceService } from '../../../services/bctw-service/bctw-device-service'; -import { getRequestHandlerMocks } from '../../../__mocks__/db'; -import { POST, upsertDevice } from './index'; - -describe('upsertDevice', () => { - afterEach(() => { - sinon.restore(); - }); - - describe('openapi schema', () => { - const ajv = new Ajv(); - - it('is valid openapi v3 schema', () => { - expect(ajv.validateSchema(POST.apiDoc as unknown as object)).to.be.true; - }); - }); - - it('upsert device details', async () => { - const mockUpsertDevice = sinon.stub(BctwDeviceService.prototype, 'updateDevice'); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = upsertDevice(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.statusValue).to.equal(200); - expect(mockUpsertDevice).to.have.been.calledOnce; - }); - - it('catches and re-throws errors', async () => { - const mockError = new Error('a test error'); - const mockBctwService = sinon.stub(BctwDeviceService.prototype, 'updateDevice').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = upsertDevice(); - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((mockError as HTTPError).message).to.eql('a test error'); - expect(mockBctwService).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/device/index.ts b/api/src/paths/telemetry/device/index.ts deleted file mode 100644 index a0f4006f9f..0000000000 --- 
a/api/src/paths/telemetry/device/index.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { BctwDeviceService } from '../../../services/bctw-service/bctw-device-service'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/device/{deviceId}'); - -export const POST: Operation = [ - // TODO: Should this endpoint be guarded such that the user must at the very least belong to a project? - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - upsertDevice() -]; - -POST.apiDoc = { - description: 'Upsert device metadata inside BCTW.', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - requestBody: { - description: 'Device body', - required: true, - content: { - 'application/json': { - schema: { - properties: { - collar_id: { - type: 'string', - format: 'uuid' - }, - device_id: { - type: 'integer' - }, - device_make: { - type: 'string' - }, - device_model: { - type: 'string', - nullable: true - }, - frequency: { - type: 'number', - nullable: true - }, - frequency_unit: { - type: 'string', - nullable: true - } - } - } - } - } - }, - responses: { - 200: { - description: 'Resultant object of upsert.', - content: { - 'application/json': { - schema: { - type: 'object' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function upsertDevice(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwDeviceService = new 
BctwDeviceService(user); - try { - const results = await bctwDeviceService.updateDevice(req.body); - return res.status(200).json(results); - } catch (error) { - defaultLog.error({ label: 'upsertDevice', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/index.test.ts b/api/src/paths/telemetry/index.test.ts index 4e53a7fea1..400ed4dbd2 100644 --- a/api/src/paths/telemetry/index.test.ts +++ b/api/src/paths/telemetry/index.test.ts @@ -5,8 +5,12 @@ import sinonChai from 'sinon-chai'; import { SYSTEM_ROLE } from '../../constants/roles'; import * as db from '../../database/db'; import { HTTPError } from '../../errors/http-error'; +import { + Telemetry, + TelemetryVendorEnum +} from '../../repositories/telemetry-repositories/telemetry-vendor-repository.interface'; import { SystemUser } from '../../repositories/user-repository'; -import { FindTelemetryResponse, TelemetryService } from '../../services/telemetry-service'; +import { TelemetryVendorService } from '../../services/telemetry-services/telemetry-vendor-service'; import { KeycloakUserInformation } from '../../utils/keycloak-utils'; import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import { findTelemetry } from './index'; @@ -19,19 +23,18 @@ describe('findTelemetry', () => { }); it('finds and returns telemetry', async () => { - const mockFindTelemetryResponse: FindTelemetryResponse[] = [ + const mockFindTelemetryResponse: Telemetry[] = [ { telemetry_id: '789-789-789', + deployment_id: 2, + critter_id: 3, acquisition_date: '2021-01-01', + vendor: TelemetryVendorEnum.MANUAL, + serial: '123', latitude: 49.123, longitude: -126.123, - telemetry_type: 'vendor', - device_id: 123, - bctw_deployment_id: '123-123-123', - critter_id: 1, - deployment_id: 2, - critterbase_critter_id: '456-456-456', - animal_id: '678-678-678' + elevation: null, + temperature: null } ]; @@ -45,14 +48,17 @@ describe('findTelemetry', () => { sinon.stub(db, 
'getDBConnection').returns(mockDBConnection); const findTelemetryStub = sinon - .stub(TelemetryService.prototype, 'findTelemetry') + .stub(TelemetryVendorService.prototype, 'findTelemetry') .resolves(mockFindTelemetryResponse); + const findTelemetryCountStub = sinon.stub(TelemetryVendorService.prototype, 'findTelemetryCount').resolves(1); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.query = { keyword: 'keyword', - itis_tsns: ['123456'], + start_date: '2021-01-01', + end_date: '2021-02-01', system_user_id: '11', page: '2', limit: '10', @@ -72,6 +78,7 @@ describe('findTelemetry', () => { expect(mockDBConnection.commit).to.have.been.calledOnce; expect(findTelemetryStub).to.have.been.calledOnceWith(true, 20, sinon.match.object, sinon.match.object); + expect(findTelemetryCountStub).to.have.been.calledOnceWith(true, 20, sinon.match.object); expect(mockRes.jsonValue.telemetry).to.eql(mockFindTelemetryResponse); expect(mockRes.jsonValue.pagination).not.to.be.null; @@ -91,14 +98,15 @@ describe('findTelemetry', () => { sinon.stub(db, 'getDBConnection').returns(mockDBConnection); const findTelemetryStub = sinon - .stub(TelemetryService.prototype, 'findTelemetry') + .stub(TelemetryVendorService.prototype, 'findTelemetry') .rejects(new Error('a test error')); const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.query = { keyword: 'keyword', - itis_tsns: ['123456'], + start_date: '2021-01-01', + end_date: '2021-02-01', system_user_id: '11', page: '2', limit: '10', diff --git a/api/src/paths/telemetry/index.ts b/api/src/paths/telemetry/index.ts index 1f2f46cda5..d41c98a071 100644 --- a/api/src/paths/telemetry/index.ts +++ b/api/src/paths/telemetry/index.ts @@ -5,7 +5,7 @@ import { getDBConnection } from '../../database/db'; import { IAllTelemetryAdvancedFilters } from '../../models/telemetry-view'; import { paginationRequestQueryParamSchema, paginationResponseSchema } from '../../openapi/schemas/pagination'; import { 
authorizeRequestHandler, userHasValidRole } from '../../request-handlers/security/authorization'; -import { TelemetryService } from '../../services/telemetry-service'; +import { TelemetryVendorService } from '../../services/telemetry-services/telemetry-vendor-service'; import { getLogger } from '../../utils/logger'; import { ensureCompletePaginationOptions, @@ -70,6 +70,28 @@ GET.apiDoc = { nullable: true } }, + { + in: 'query', + name: 'start_date', + description: 'ISO 8601 date string', + required: false, + schema: { + type: 'string', + nullable: true + }, + example: '2021-01-01' + }, + { + in: 'query', + name: 'end_date', + description: 'ISO 8601 date string', + required: false, + schema: { + type: 'string', + nullable: true + }, + example: '2021-02-01' + }, { in: 'query', name: 'system_user_id', @@ -97,58 +119,66 @@ GET.apiDoc = { items: { type: 'object', additionalProperties: false, + required: [ + 'telemetry_id', + 'deployment_id', + 'critter_id', + 'vendor', + 'serial', + 'acquisition_date', + 'latitude', + 'longitude', + 'elevation', + 'temperature' + ], properties: { telemetry_id: { type: 'string', - description: 'The BCTW telemetry record ID.' + format: 'uuid', + description: 'The telemetry record ID.' + }, + deployment_id: { + type: 'number', + minimum: 1, + description: 'The deployment record ID.' + }, + critter_id: { + type: 'number', + minimum: 1, + description: 'The SIMS critter record ID.' + }, + vendor: { + type: 'string', + description: 'The telemetry device vendor.' + }, + serial: { + type: 'string', + description: 'The telemetry device serial number.' }, acquisition_date: { type: 'string', nullable: true, - description: 'The BCTW telemetry record acquisition date.' + description: 'The date the telemetry record was recorded by the telemetry device.' }, latitude: { type: 'number', nullable: true, - description: 'The BCTW telemetry record latitude.' + description: 'The latitude of the telemetry record.' 
}, longitude: { type: 'number', nullable: true, - description: 'The BCTW telemetry record longitude.' - }, - telemetry_type: { - type: 'string', - description: 'The BCTW telemetry type.' + description: 'The longitude of the telemetry record.' }, - device_id: { + elevation: { type: 'number', - description: 'The BCTW device ID.' + description: 'The elevation of the telemetry point.', + nullable: true }, - bctw_deployment_id: { - type: 'string', - format: 'uuid', - description: 'The BCTW deployment ID.' - }, - critter_id: { + temperature: { type: 'number', - minimum: 1, - description: 'The SIMS critter record ID.' - }, - deployment_id: { - type: 'number', - minimum: 1, - description: 'The SIMS deployment record ID.' - }, - critterbase_critter_id: { - type: 'string', - format: 'uuid', - description: 'The Critterbase critter ID.' - }, - animal_id: { - type: 'string', - nullable: true, - description: 'The Critterbase animal ID.' + description: 'The temperature of the telemetry point.', + nullable: true } } } @@ -204,14 +234,17 @@ export function findTelemetry(): RequestHandler { const paginationOptions = makePaginationOptionsFromRequest(req); - const telemetryService = new TelemetryService(connection); + const telemetryVendorService = new TelemetryVendorService(connection); - const telemetry = await telemetryService.findTelemetry( - isUserAdmin, - systemUserId, - filterFields, - ensureCompletePaginationOptions(paginationOptions) - ); + const [telemetry, telemetryCount] = await Promise.all([ + telemetryVendorService.findTelemetry( + isUserAdmin, + systemUserId, + filterFields, + ensureCompletePaginationOptions(paginationOptions) + ), + telemetryVendorService.findTelemetryCount(isUserAdmin, systemUserId, filterFields) + ]); await connection.commit(); @@ -220,7 +253,7 @@ export function findTelemetry(): RequestHandler { return res .status(200) - .json({ telemetry: telemetry, pagination: makePaginationResponse(telemetry.length, paginationOptions) }); + .json({ telemetry: 
telemetry, pagination: makePaginationResponse(telemetryCount, paginationOptions) }); } catch (error) { defaultLog.error({ label: 'findTelemetry', message: 'error', error }); await connection.rollback(); @@ -244,6 +277,8 @@ function parseQueryParams( keyword: req.query.keyword ?? undefined, itis_tsns: req.query.itis_tsns ?? undefined, itis_tsn: (req.query.itis_tsn && Number(req.query.itis_tsn)) ?? undefined, + start_date: req.query.start_date ?? undefined, + end_date: req.query.end_date ?? undefined, system_user_id: req.query.system_user_id ?? undefined }; } diff --git a/api/src/paths/telemetry/manual/delete.test.ts b/api/src/paths/telemetry/manual/delete.test.ts deleted file mode 100644 index c0ce05f141..0000000000 --- a/api/src/paths/telemetry/manual/delete.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../../repositories/user-repository'; -import { BctwTelemetryService, IManualTelemetry } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getRequestHandlerMocks } from '../../../__mocks__/db'; -import { deleteManualTelemetry } from './delete'; - -const mockTelemetry = [ - { - telemetry_manual_id: 1 - }, - { - telemetry_manual_id: 2 - } -] as unknown[] as IManualTelemetry[]; - -describe('deleteManualTelemetry', () => { - afterEach(() => { - sinon.restore(); - }); - it('should retrieve all manual telemetry', async () => { - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'deleteManualTelemetry') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = deleteManualTelemetry(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetTelemetry).to.have.been.calledOnce; - }); - 
it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon.stub(BctwTelemetryService.prototype, 'deleteManualTelemetry').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = deleteManualTelemetry(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/manual/delete.ts b/api/src/paths/telemetry/manual/delete.ts deleted file mode 100644 index 90ed481f67..0000000000 --- a/api/src/paths/telemetry/manual/delete.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { manual_telemetry_responses } from '.'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { BctwTelemetryService } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getLogger } from '../../../utils/logger'; -const defaultLog = getLogger('paths/telemetry/manual/delete'); - -export const POST: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - deleteManualTelemetry() -]; - -POST.apiDoc = { - description: 'Delete manual telemetry records', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - title: 'Manual Telemetry ids to delete', - type: 'array', - minItems: 1, - items: { - title: 'telemetry manual ids', - type: 'string', - format: 'uuid' - } - } - } - } - }, - responses: manual_telemetry_responses -}; - -export function 
deleteManualTelemetry(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwTelemetryService = new BctwTelemetryService(user); - try { - const result = await bctwTelemetryService.deleteManualTelemetry(req.body); - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'deleteManualTelemetry', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/manual/deployments.test.ts b/api/src/paths/telemetry/manual/deployments.test.ts deleted file mode 100644 index 601da022d8..0000000000 --- a/api/src/paths/telemetry/manual/deployments.test.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../../repositories/user-repository'; -import { BctwTelemetryService, IManualTelemetry } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getRequestHandlerMocks } from '../../../__mocks__/db'; -import { getManualTelemetryByDeploymentIds } from './deployments'; - -const mockTelemetry = [ - { - telemetry_manual_id: 1 - }, - { - telemetry_manual_id: 2 - } -] as unknown[] as IManualTelemetry[]; - -describe('getManualTelemetryByDeploymentIds', () => { - afterEach(() => { - sinon.restore(); - }); - it('should retrieve all manual telemetry', async () => { - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getManualTelemetryByDeploymentIds') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getManualTelemetryByDeploymentIds(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetTelemetry).to.have.been.calledOnce; - }); - it('should catch error', async () => { - const mockError = new Error('test 
error'); - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getManualTelemetryByDeploymentIds') - .rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getManualTelemetryByDeploymentIds(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/manual/deployments.ts b/api/src/paths/telemetry/manual/deployments.ts deleted file mode 100644 index 83c497ba7b..0000000000 --- a/api/src/paths/telemetry/manual/deployments.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { manual_telemetry_responses } from '.'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { BctwTelemetryService } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/manual'); - -export const POST: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getManualTelemetryByDeploymentIds() -]; - -POST.apiDoc = { - description: 'Get a list of manually created telemetry by deployment ids', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: manual_telemetry_responses, - requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - title: 'Manual Telemetry deployment ids', - type: 'array', - minItems: 1, - items: { - title: 'Manual telemetry deployment ids', - type: 'string', - format: 'uuid' - } - } - } - } - } -}; - 
-export function getManualTelemetryByDeploymentIds(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwService = new BctwTelemetryService(user); - - try { - const result = await bctwService.getManualTelemetryByDeploymentIds(req.body); - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getManualTelemetryByDeploymentIds', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/manual/index.test.ts b/api/src/paths/telemetry/manual/index.test.ts deleted file mode 100644 index 0ada27ba2b..0000000000 --- a/api/src/paths/telemetry/manual/index.test.ts +++ /dev/null @@ -1,150 +0,0 @@ -import Ajv from 'ajv'; -import { expect } from 'chai'; -import sinon from 'sinon'; -import { createManualTelemetry, GET, getManualTelemetry, PATCH, POST, updateManualTelemetry } from '.'; -import { SystemUser } from '../../../repositories/user-repository'; -import { BctwTelemetryService, IManualTelemetry } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getRequestHandlerMocks } from '../../../__mocks__/db'; - -const mockTelemetry = [ - { - telemetry_manual_id: 1 - }, - { - telemetry_manual_id: 2 - } -] as unknown[] as IManualTelemetry[]; - -describe('manual telemetry endpoints', () => { - afterEach(() => { - sinon.restore(); - }); - - describe('getManualTelemetry', () => { - describe('openapi schema', () => { - it('is valid openapi v3 schema', () => { - const ajv = new Ajv(); - expect(ajv.validateSchema(GET.apiDoc as unknown as object)).to.be.true; - }); - }); - it('should retrieve all manual telemetry', async () => { - const mockGetTelemetry = sinon.stub(BctwTelemetryService.prototype, 'getManualTelemetry').resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getManualTelemetry(); - - await 
requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetTelemetry).to.have.been.calledOnce; - }); - - it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon.stub(BctwTelemetryService.prototype, 'getManualTelemetry').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getManualTelemetry(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); - }); - - describe('createManualTelemetry', () => { - describe('openapi schema', () => { - it('is valid openapi v3 schema', () => { - const ajv = new Ajv(); - expect(ajv.validateSchema(POST.apiDoc as unknown as object)).to.be.true; - }); - }); - it('should bulk create manual telemetry', async () => { - const mockCreateTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'createManualTelemetry') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = createManualTelemetry(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(201); - expect(mockCreateTelemetry).to.have.been.calledOnce; - }); - it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon.stub(BctwTelemetryService.prototype, 'createManualTelemetry').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - 
const requestHandler = createManualTelemetry(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); - }); - - describe('updateManualTelemetry', () => { - describe('openapi schema', () => { - it('is valid openapi v3 schema', () => { - const ajv = new Ajv(); - expect(ajv.validateSchema(PATCH.apiDoc as unknown as object)).to.be.true; - }); - }); - it('should bulk update manual telemetry', async () => { - const mockCreateTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'updateManualTelemetry') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = updateManualTelemetry(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(201); - expect(mockCreateTelemetry).to.have.been.calledOnce; - }); - it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon.stub(BctwTelemetryService.prototype, 'updateManualTelemetry').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = updateManualTelemetry(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); - }); -}); diff --git a/api/src/paths/telemetry/manual/index.ts b/api/src/paths/telemetry/manual/index.ts deleted file mode 100644 index 9156ff8576..0000000000 --- a/api/src/paths/telemetry/manual/index.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; 
-import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { BctwTelemetryService } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/manual'); - -export const manual_telemetry_responses = { - 200: { - description: 'Manual telemetry response object', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - type: 'object', - additionalProperties: false, - properties: { - telemetry_manual_id: { type: 'string' }, - deployment_id: { type: 'string' }, - latitude: { type: 'number' }, - longitude: { type: 'number' }, - acquisition_date: { type: 'string' } - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } -}; - -export const GET: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getManualTelemetry() -]; - -GET.apiDoc = { - description: 'Get a list of manually created telemetry', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: manual_telemetry_responses -}; - -export function getManualTelemetry(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwService = new BctwTelemetryService(user); - try { - const result = await bctwService.getManualTelemetry(); - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getManualTelemetry', message: 'error', error }); - throw error; - } - }; -} - -export const POST: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - 
}), - createManualTelemetry() -]; - -POST.apiDoc = { - description: 'Bulk create Manual Telemetry', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: manual_telemetry_responses, - requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - title: 'Manual Telemetry create objects', - type: 'array', - minItems: 1, - items: { - title: 'manual telemetry records', - type: 'object', - additionalProperties: false, - required: ['deployment_id', 'latitude', 'longitude', 'acquisition_date'], - properties: { - deployment_id: { - type: 'string', - format: 'uuid' - }, - latitude: { - type: 'number' - }, - longitude: { - type: 'number' - }, - acquisition_date: { - type: 'string' - } - } - } - } - } - } - } -}; - -export function createManualTelemetry(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - const bctwService = new BctwTelemetryService(user); - try { - const result = await bctwService.createManualTelemetry(req.body); - return res.status(201).json(result); - } catch (error) { - defaultLog.error({ label: 'createManualTelemetry', message: 'error', error }); - throw error; - } - }; -} - -export const PATCH: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - updateManualTelemetry() -]; - -PATCH.apiDoc = { - description: 'Bulk update Manual Telemetry', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: manual_telemetry_responses, - requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - title: 'Manual Telemetry update objects', - type: 'array', - minItems: 1, - items: { - title: 'manual telemetry records', - type: 'object', - additionalProperties: false, - required: ['telemetry_manual_id'], - minProperties: 2, - properties: { - telemetry_manual_id: { - type: 'string', - format: 'uuid' - }, - deployment_id: { 
- type: 'string', - format: 'uuid' - }, - latitude: { - type: 'number' - }, - longitude: { - type: 'number' - }, - acquisition_date: { - type: 'string' - } - } - } - } - } - } - } -}; - -export function updateManualTelemetry(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - const bctwService = new BctwTelemetryService(user); - try { - const result = await bctwService.updateManualTelemetry(req.body); - return res.status(201).json(result); - } catch (error) { - defaultLog.error({ label: 'updateManualTelemetry', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/manual/process.ts b/api/src/paths/telemetry/manual/process.ts deleted file mode 100644 index 6a4f22b287..0000000000 --- a/api/src/paths/telemetry/manual/process.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_PERMISSION, SYSTEM_ROLE } from '../../../constants/roles'; -import { getDBConnection } from '../../../database/db'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { TelemetryService } from '../../../services/telemetry-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('/api/telemetry/manual/process'); - -export const POST: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validProjectPermissions: [PROJECT_PERMISSION.COORDINATOR, PROJECT_PERMISSION.COLLABORATOR], - projectId: Number(req.body.project_id), - discriminator: 'ProjectPermission' - }, - { - validSystemRoles: [SYSTEM_ROLE.DATA_ADMINISTRATOR], - discriminator: 'SystemRole' - } - ] - }; - }), - processFile() -]; - -POST.apiDoc = { - description: 'Processes and validates telemetry CSV submission', - tags: ['survey', 'telemetry', 'csv'], - security: [ - { - Bearer: [] - } - ], - 
requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - type: 'object', - additionalProperties: false, - required: ['submission_id'], - properties: { - submission_id: { - description: 'The ID of the submission to validate', - type: 'integer' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Validation results of the telemetry submission', - content: { - 'application/json': { - schema: { - type: 'object', - additionalProperties: false, - properties: { - success: { - type: 'boolean', - description: 'A flag determining if the file was processed' - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function processFile(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const submissionId = req.body.submission_id; - const connection = getDBConnection(req.keycloak_token); - try { - await connection.open(); - - const service = new TelemetryService(connection); - - await service.processTelemetryCsvSubmission(submissionId, user); - - res.status(200).json({ success: true }); - - await connection.commit(); - } catch (error: any) { - defaultLog.error({ label: 'processFile', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/telemetry/vendor/deployments.test.ts b/api/src/paths/telemetry/vendor/deployments.test.ts deleted file mode 100644 index c1a0d360cd..0000000000 --- a/api/src/paths/telemetry/vendor/deployments.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../../repositories/user-repository'; -import { BctwTelemetryService, 
IVendorTelemetry } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getRequestHandlerMocks } from '../../../__mocks__/db'; -import { getVendorTelemetryByDeploymentIds } from './deployments'; - -const mockTelemetry: IVendorTelemetry[] = [ - { - telemetry_id: '123-123-123', - deployment_id: '345-345-345', - latitude: 49.123, - longitude: -126.123, - acquisition_date: '2021-01-01', - collar_transaction_id: '45-45-45', - critter_id: '78-78-78', - deviceid: 123456, - elevation: 200, - vendor: 'vendor1' - }, - { - telemetry_id: '456-456-456', - deployment_id: '789-789-789', - latitude: 49.123, - longitude: -126.123, - acquisition_date: '2021-01-01', - collar_transaction_id: '54-54-54', - critter_id: '87-87-87', - deviceid: 654321, - elevation: 10, - vendor: 'vendor2' - } -]; - -describe('getVendorTelemetryByDeploymentIds', () => { - afterEach(() => { - sinon.restore(); - }); - it('should retrieve all vendor telemetry by deployment ids', async () => { - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getVendorTelemetryByDeploymentIds') - .resolves(mockTelemetry); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getVendorTelemetryByDeploymentIds(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockTelemetry); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetTelemetry).to.have.been.calledOnce; - }); - it('should catch error', async () => { - const mockError = new Error('test error'); - const mockGetTelemetry = sinon - .stub(BctwTelemetryService.prototype, 'getVendorTelemetryByDeploymentIds') - .rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getVendorTelemetryByDeploymentIds(); - - try 
{ - await requestHandler(mockReq, mockRes, mockNext); - } catch (err) { - expect(err).to.equal(mockError); - expect(mockGetTelemetry).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/vendor/deployments.ts b/api/src/paths/telemetry/vendor/deployments.ts deleted file mode 100644 index de4f3e3e90..0000000000 --- a/api/src/paths/telemetry/vendor/deployments.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { getBctwUser } from '../../../services/bctw-service/bctw-service'; -import { BctwTelemetryService } from '../../../services/bctw-service/bctw-telemetry-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/manual'); - -const vendor_telemetry_responses = { - 200: { - description: 'Manual telemetry response object', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - type: 'object', - additionalProperties: false, - properties: { - telemetry_id: { type: 'string', format: 'uuid' }, - deployment_id: { type: 'string', format: 'uuid' }, - collar_transaction_id: { type: 'string', format: 'uuid' }, - critter_id: { type: 'string', format: 'uuid' }, - deviceid: { type: 'number' }, - latitude: { type: 'number', nullable: true }, - longitude: { type: 'number', nullable: true }, - elevation: { type: 'number', nullable: true }, - vendor: { type: 'string', nullable: true }, - acquisition_date: { type: 'string', nullable: true } - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } -}; - -export const POST: Operation = [ - authorizeRequestHandler(() => { - return { - and: 
[ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getVendorTelemetryByDeploymentIds() -]; - -POST.apiDoc = { - description: 'Get a list of vendor retrieved telemetry by deployment ids', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: vendor_telemetry_responses, - requestBody: { - description: 'Request body', - required: true, - content: { - 'application/json': { - schema: { - title: 'Telemetry for Deployment ids', - type: 'array', - minItems: 1, - items: { - title: 'Vendor telemetry deployment ids', - type: 'string', - format: 'uuid' - } - } - } - } - } -}; - -export function getVendorTelemetryByDeploymentIds(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwService = new BctwTelemetryService(user); - try { - const result = await bctwService.getVendorTelemetryByDeploymentIds(req.body); - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getManualTelemetryByDeploymentIds', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/telemetry/vendors.test.ts b/api/src/paths/telemetry/vendors.test.ts deleted file mode 100644 index 329ea891fe..0000000000 --- a/api/src/paths/telemetry/vendors.test.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { expect } from 'chai'; -import sinon from 'sinon'; -import { SystemUser } from '../../repositories/user-repository'; -import { BctwDeviceService } from '../../services/bctw-service/bctw-device-service'; -import { getRequestHandlerMocks } from '../../__mocks__/db'; -import { getCollarVendors } from './vendors'; - -describe('getCollarVendors', () => { - afterEach(() => { - sinon.restore(); - }); - - it('gets collar vendors', async () => { - const mockVendors = ['vendor1', 'vendor2']; - const mockGetCollarVendors = sinon.stub(BctwDeviceService.prototype, 'getCollarVendors').resolves(mockVendors); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { 
user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getCollarVendors(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.jsonValue).to.eql(mockVendors); - expect(mockRes.statusValue).to.equal(200); - expect(mockGetCollarVendors).to.have.been.calledOnce; - }); - - it('catches and re-throws error', async () => { - const mockError = new Error('a test error'); - - const mockGetCollarVendors = sinon.stub(BctwDeviceService.prototype, 'getCollarVendors').rejects(mockError); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.system_user = { user_identifier: 'user', user_guid: 'guid' } as SystemUser; - - const requestHandler = getCollarVendors(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(actualError).to.equal(mockError); - expect(mockGetCollarVendors).to.have.been.calledOnce; - } - }); -}); diff --git a/api/src/paths/telemetry/vendors.ts b/api/src/paths/telemetry/vendors.ts deleted file mode 100644 index e9ea0e3561..0000000000 --- a/api/src/paths/telemetry/vendors.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { BctwDeviceService } from '../../services/bctw-service/bctw-device-service'; -import { getBctwUser } from '../../services/bctw-service/bctw-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('paths/telemetry/vendors'); - -export const GET: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - discriminator: 'SystemUser' - } - ] - }; - }), - getCollarVendors() -]; - -GET.apiDoc = { - description: 'Get a list of supported collar vendors.', - tags: ['telemetry'], - security: [ - { - Bearer: [] - } - ], - responses: { - 200: { - description: 'Collar vendors response object.', - 
content: { - 'application/json': { - schema: { - type: 'array', - items: { - type: 'string' - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getCollarVendors(): RequestHandler { - return async (req, res) => { - const user = getBctwUser(req); - - const bctwDeviceService = new BctwDeviceService(user); - - try { - const result = await bctwDeviceService.getCollarVendors(); - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getCollarVendors', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/repositories/code-repository.ts b/api/src/repositories/code-repository.ts index 53e9df3c85..a70480672c 100644 --- a/api/src/repositories/code-repository.ts +++ b/api/src/repositories/code-repository.ts @@ -25,6 +25,8 @@ const SurveyProgressCode = ICode.extend({ description: z.string() }); const MethodResponseMetricsCode = ICode.extend({ description: z.string() }); const AttractantCode = ICode.extend({ description: z.string() }); const ObservationSubcountSignCode = ICode.extend({ description: z.string() }); +const DeviceMakeCode = ICode.extend({ description: z.string() }); +const FrequencyUnitCode = ICode.extend({ description: z.string() }); const AlertTypeCode = ICode.extend({ description: z.string() }); const VantageCode = ICode.extend({ description: z.string() }); @@ -49,6 +51,8 @@ export const IAllCodeSets = z.object({ method_response_metrics: CodeSet(MethodResponseMetricsCode.shape), attractants: CodeSet(AttractantCode.shape), observation_subcount_signs: CodeSet(ObservationSubcountSignCode.shape), + telemetry_device_makes: CodeSet(DeviceMakeCode.shape), + frequency_units: CodeSet(FrequencyUnitCode.shape), alert_types: CodeSet(AlertTypeCode.shape), vantages: 
CodeSet(VantageCode.shape) }); @@ -63,9 +67,9 @@ export class CodeRepository extends BaseRepository { */ async getSampleMethods() { const sql = SQL` - SELECT - method_lookup_id as id, - name, + SELECT + method_lookup_id as id, + name, description FROM method_lookup ORDER BY name ASC; @@ -108,7 +112,7 @@ export class CodeRepository extends BaseRepository { first_nations_id as id, name FROM first_nations - WHERE record_end_date is null + WHERE record_end_date is null ORDER BY name ASC; `; @@ -129,7 +133,7 @@ export class CodeRepository extends BaseRepository { agency_id as id, name FROM agency - WHERE record_end_date is null + WHERE record_end_date is null ORDER BY name ASC; `; @@ -148,7 +152,7 @@ export class CodeRepository extends BaseRepository { const sqlStatement = SQL` SELECT proprietor_type_id as id, - name, + name, is_first_nation FROM proprietor_type WHERE record_end_date is null; @@ -190,7 +194,7 @@ export class CodeRepository extends BaseRepository { const sqlStatement = SQL` SELECT intended_outcome_id as id, - name, + name, description FROM intended_outcome WHERE record_end_date is null; @@ -214,7 +218,7 @@ export class CodeRepository extends BaseRepository { agency_id, name FROM investment_action_category - WHERE record_end_date is null + WHERE record_end_date is null ORDER BY name ASC; `; @@ -318,7 +322,7 @@ export class CodeRepository extends BaseRepository { name FROM project_role WHERE record_end_date is null - ORDER BY + ORDER BY CASE WHEN name = 'Coordinator' THEN 0 ELSE 1 END; `; @@ -471,6 +475,50 @@ export class CodeRepository extends BaseRepository { return response.rows; } + /** + * Get active telemetry device makes. 
+ * + * @return {*} + * @memberof CodeRepository + */ + async getActiveTelemetryDeviceMakes() { + const sqlStatement = SQL` + SELECT + device_make_id as id, + name, + description + FROM device_make + WHERE record_end_date is null + -- Some legacy device makes have no effective date, as they are no longer supported, and must be excluded + AND record_effective_date IS NOT NULL; + `; + + const response = await this.connection.sql(sqlStatement, DeviceMakeCode); + + return response.rows; + } + + /** + * Get frequency unit codes. + * + * @return {*} + * @memberof CodeRepository + */ + async getFrequencyUnits() { + const sqlStatement = SQL` + SELECT + frequency_unit_id as id, + name, + description + FROM frequency_unit + WHERE record_end_date is null; + `; + + const response = await this.connection.sql(sqlStatement, FrequencyUnitCode); + + return response.rows; + } + /** * Fetch alert type codes * diff --git a/api/src/repositories/deployment-repository.ts b/api/src/repositories/deployment-repository.ts deleted file mode 100644 index af383837dd..0000000000 --- a/api/src/repositories/deployment-repository.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { getKnex } from '../database/db'; -import { ICreateSurveyDeployment, IUpdateSurveyDeployment, SurveyDeployment } from '../models/survey-deployment'; -import { getLogger } from '../utils/logger'; -import { BaseRepository } from './base-repository'; - -const defaultLog = getLogger('repositories/deployment'); - -/** - * Repository layer for survey deployments - * - * @export - * @class DeploymentRepository - * @extends {BaseRepository} - */ -export class DeploymentRepository extends BaseRepository { - /** - * Returns deployments in a survey - * - * @param {number} surveyId - * @return {*} {Promise} - * @memberof DeploymentRepository - */ - async getDeploymentsForSurveyId(surveyId: number): Promise { - defaultLog.debug({ label: 'getDeploymentsForSurveyId', surveyId }); - - const queryBuilder = getKnex() - .select( - 'deployment_id', 
- 'd.critter_id as critter_id', - 'c.critterbase_critter_id', - 'bctw_deployment_id', - 'critterbase_start_capture_id', - 'critterbase_end_capture_id', - 'critterbase_end_mortality_id' - ) - .from('deployment as d') - .leftJoin('critter as c', 'c.critter_id', 'd.critter_id') - .where('c.survey_id', surveyId); - - const response = await this.connection.knex(queryBuilder, SurveyDeployment); - - return response.rows; - } - - /** - * Returns a specific deployment - * - * @param {number} deploymentId - * @return {*} {Promise} - * @memberof DeploymentRepository - */ - async getDeploymentById(deploymentId: number): Promise { - defaultLog.debug({ label: 'getDeploymentById', deploymentId }); - - const queryBuilder = getKnex() - .select( - 'deployment_id', - 'd.critter_id as critter_id', - 'c.critterbase_critter_id', - 'bctw_deployment_id', - 'critterbase_start_capture_id', - 'critterbase_end_capture_id', - 'critterbase_end_mortality_id' - ) - .from('deployment as d') - .leftJoin('critter as c', 'c.critter_id', 'd.critter_id') - .where('d.deployment_id', deploymentId); - - const response = await this.connection.knex(queryBuilder, SurveyDeployment); - - return response.rows[0]; - } - - /** - * Returns a specific deployment for a given critter Id - * - * @param {number} surveyId - * @param {number} critterId - * @return {*} {Promise} - * @memberof DeploymentRepository - */ - async getDeploymentForCritterId(surveyId: number, critterId: number): Promise { - defaultLog.debug({ label: 'getDeploymentById', critterId }); - - const queryBuilder = getKnex() - .select( - 'deployment_id', - 'd.critter_id as critter_id', - 'c.critterbase_critter_id', - 'bctw_deployment_id', - 'critterbase_start_capture_id', - 'critterbase_end_capture_id', - 'critterbase_end_mortality_id' - ) - .from('deployment as d') - .leftJoin('critter as c', 'c.critter_id', 'd.critter_id') - .where('d.critter_id', critterId) - .andWhere('c.survey_id', surveyId); - - const response = await 
this.connection.knex(queryBuilder, SurveyDeployment); - - return response.rows[0]; - } - - /** - * Insert a new deployment record. - * - * @param {ICreateSurveyDeployment} deployment - * @return {*} {Promise} - * @memberof DeploymentRepository - */ - async insertDeployment(deployment: ICreateSurveyDeployment): Promise { - defaultLog.debug({ label: 'insertDeployment', bctw_deployment_id: deployment.bctw_deployment_id }); - - const queryBuilder = getKnex().table('deployment').insert({ - critter_id: deployment.critter_id, - bctw_deployment_id: deployment.bctw_deployment_id, - critterbase_start_capture_id: deployment.critterbase_start_capture_id, - critterbase_end_capture_id: deployment.critterbase_end_capture_id, - critterbase_end_mortality_id: deployment.critterbase_end_mortality_id - }); - - await this.connection.knex(queryBuilder); - } - - /** - * Update an existing deployment record. - * - * @param {IUpdateSurveyDeployment} deployment - * @return {*} {Promise} - * @memberof DeploymentRepository - */ - async updateDeployment(deployment: IUpdateSurveyDeployment): Promise { - defaultLog.debug({ label: 'updateDeployment', deployment_id: deployment.deployment_id }); - - const queryBuilder = getKnex() - .table('deployment') - .where('deployment_id', deployment.deployment_id) - .update({ - critter_id: deployment.critter_id, - critterbase_start_capture_id: deployment.critterbase_start_capture_id, - critterbase_end_capture_id: deployment.critterbase_end_capture_id, - critterbase_end_mortality_id: deployment.critterbase_end_mortality_id - }) - .returning('bctw_deployment_id'); - - const response = await this.connection.knex(queryBuilder); - - return response.rows[0].bctw_deployment_id; - } - - /** - * Deletes a deployment row. 
- * - * @param {number} surveyId - * @param {number} deploymentId - * @return {*} - * @memberof DeploymentRepository - */ - async deleteDeployment(surveyId: number, deploymentId: number): Promise<{ bctw_deployment_id: string }> { - defaultLog.debug({ label: 'deleteDeployment', deploymentId }); - - const queryBuilder = getKnex() - .table('deployment') - .join('critter', 'deployment.critter_id', 'critter.critter_id') - .where({ - 'deployment.deployment_id': deploymentId, - 'critter.survey_id': surveyId - }) - .delete() - .returning('bctw_deployment_id'); - - const response = await this.connection.knex(queryBuilder); - - return response.rows[0]; - } -} diff --git a/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.interface.ts new file mode 100644 index 0000000000..75fde561c8 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.interface.ts @@ -0,0 +1,41 @@ +import { z } from 'zod'; +import { CritterRecord } from '../../database-models/critter'; +import { DeploymentRecord } from '../../database-models/deployment'; +import { DeviceRecord } from '../../database-models/device'; + +export const CreateDeployment = DeploymentRecord.omit({ + // Database generated columns + device_key: true, + attachment_start_timestamp: true, + attachment_end_timestamp: true, + // Primary key + deployment_id: true +}); + +export type CreateDeployment = z.infer; + +export const ExtendedDeploymentRecord = DeploymentRecord.merge( + DeviceRecord.pick({ + serial: true, + device_make_id: true, + model: true + }).merge( + CritterRecord.pick({ + critterbase_critter_id: true + }) + ) +); + +export type ExtendedDeploymentRecord = z.infer; + +export const UpdateDeployment = DeploymentRecord.omit({ + // Database generated columns + device_key: true, + attachment_start_timestamp: true, + attachment_end_timestamp: true, + // Primary key and survey 
not updatable + deployment_id: true, + survey_id: true +}); + +export type UpdateDeployment = z.infer; diff --git a/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.test.ts b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.test.ts new file mode 100644 index 0000000000..f319cc35fa --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.test.ts @@ -0,0 +1,369 @@ +import { expect } from 'chai'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import { ApiExecuteSQLError } from '../../errors/api-error'; +import { getMockDBConnection } from '../../__mocks__/db'; +import { TelemetryDeploymentRepository } from './telemetry-deployment-repository'; +import { CreateDeployment, UpdateDeployment } from './telemetry-deployment-repository.interface'; + +describe('TelemetryDeploymentRepository', () => { + beforeEach(() => {}); + + afterEach(() => { + sinon.restore(); + }); + + describe('createDeployment', () => { + it('should create a deployment successfully', async () => { + const mockResponse = { + rowCount: 1, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const deployment: CreateDeployment = { + survey_id: 1, + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null + }; + + await telemetryDeploymentRepository.createDeployment(deployment); + + expect(mockDbConnection.sql).to.have.been.calledOnce; + }); + + it('should throw an error if the deployment creation fails', async () => { + const mockResponse = { + 
rowCount: 0, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const deployment: CreateDeployment = { + survey_id: 1, + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null + }; + + try { + await telemetryDeploymentRepository.createDeployment(deployment); + } catch (error) { + expect(error).to.be.instanceOf(ApiExecuteSQLError); + expect((error as ApiExecuteSQLError).message).to.equal('Failed to create deployment'); + } + }); + }); + + describe('getDeploymentsForSurvey', () => { + it('should get deployments by survey ID successfully', async () => { + const mockDeploymentRecord = { + deployment_id: 1, + survey_id: 1, + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null, + serial: '1234', + device_make_id: 1, + model: 'Model', + critterbase_critter_id: 1 + }; + + const mockResponse = { + rowCount: 1, + rows: [mockDeploymentRecord] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ knex: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + + const response = await telemetryDeploymentRepository.getDeploymentsForSurvey(surveyId); + + expect(response).to.eql([mockDeploymentRecord]); + }); + + it('should 
get a deployment by ID successfully', async () => { + const mockDeploymentRecord = { + deployment_id: 1, + survey_id: 1, + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null, + serial: '1234', + device_make_id: 1, + model: 'Model', + critterbase_critter_id: 1 + }; + + const mockResponse = { + rowCount: 1, + rows: [mockDeploymentRecord] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ knex: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentId = 2; + + const response = await telemetryDeploymentRepository.getDeploymentsForSurvey(surveyId, [deploymentId]); + + expect(response).to.eql([mockDeploymentRecord]); + }); + + it('should throw an error if the deployment is not found', async () => { + const mockResponse = { + rowCount: 0, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ knex: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentId = 2; + + try { + await telemetryDeploymentRepository.getDeploymentsForSurvey(surveyId, [deploymentId]); + } catch (error) { + expect(error).to.be.instanceOf(ApiExecuteSQLError); + expect((error as ApiExecuteSQLError).message).to.equal('Failed to get deployment'); + } + }); + }); + + describe('getDeploymentsForCritterId', () => { + it('should get deployments by critter ID successfully', async () => { + const mockDeploymentRecord = { + deployment_id: 1, + survey_id: 1, + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + 
attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null, + serial: '1234', + device_make_id: 1, + model: 'Model', + critterbase_critter_id: 1 + }; + + const mockResponse = { + rowCount: 1, + rows: [mockDeploymentRecord] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const critterId = 2; + + const response = await telemetryDeploymentRepository.getDeploymentsForCritterId(surveyId, critterId); + + expect(response).to.eql([mockDeploymentRecord]); + }); + }); + + describe('updateDeployment', () => { + it('should update a deployment successfully', async () => { + const mockResponse = { + rowCount: 1, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const updateDeployment: UpdateDeployment = { + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null + }; + + const surveyId = 1; + const deploymentId = 2; + + await telemetryDeploymentRepository.updateDeployment(surveyId, deploymentId, updateDeployment); + + expect(mockDbConnection.sql).to.have.been.calledOnce; + }); + + it('should throw an error if the deployment update fails', async () => { + const mockResponse = { + rowCount: 0, + rows: [] + } as any as Promise>; + + 
const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const updateDeployment: UpdateDeployment = { + critter_id: 1, + device_id: 1, + frequency: 1, + frequency_unit_id: 1, + attachment_start_date: '2023-01-01', + attachment_start_time: '12:00:00', + attachment_end_date: '2023-01-02', + attachment_end_time: '12:00:00', + critterbase_start_capture_id: '123-456-789', + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null + }; + + const surveyId = 1; + const deploymentId = 2; + + try { + await telemetryDeploymentRepository.updateDeployment(surveyId, deploymentId, updateDeployment); + } catch (error) { + expect(error).to.be.instanceOf(ApiExecuteSQLError); + expect((error as ApiExecuteSQLError).message).to.equal('Failed to update deployment'); + } + }); + }); + + describe('deleteDeployment', () => { + it('should delete a deployment successfully', async () => { + const mockResponse = { + rowCount: 1, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentId = 2; + + await telemetryDeploymentRepository.deleteDeployment(surveyId, deploymentId); + + expect(mockDbConnection.sql).to.have.been.calledOnce; + }); + + it('should throw an error if the deployment deletion fails', async () => { + const mockResponse = { + rowCount: 0, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ sql: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentId = 2; + + try { + await telemetryDeploymentRepository.deleteDeployment(surveyId, deploymentId); + } catch (error) { + 
expect(error).to.be.instanceOf(ApiExecuteSQLError); + expect((error as ApiExecuteSQLError).message).to.equal('Failed to delete deployment'); + } + }); + }); + + describe('deleteDeployments', () => { + it('should delete multiple deployments successfully', async () => { + const mockResponse = { + rowCount: 3, + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ knex: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentIds = [1, 2, 3]; + + await telemetryDeploymentRepository.deleteDeployments(surveyId, deploymentIds); + + expect(mockDbConnection.knex).to.have.been.calledOnce; + }); + + it('should throw an error if the multiple deployments deletion fails', async () => { + const mockResponse = { + rowCount: 2, // rowCount is less than the number of deploymentIds + rows: [] + } as any as Promise>; + + const mockDbConnection = getMockDBConnection({ knex: sinon.stub().resolves(mockResponse) }); + + const telemetryDeploymentRepository = new TelemetryDeploymentRepository(mockDbConnection); + + const surveyId = 1; + const deploymentIds = [1, 2, 3]; + + try { + await telemetryDeploymentRepository.deleteDeployments(surveyId, deploymentIds); + } catch (error) { + expect(error).to.be.instanceOf(ApiExecuteSQLError); + expect((error as ApiExecuteSQLError).message).to.equal('Failed to delete deployments'); + } + }); + }); +}); diff --git a/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.ts b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.ts new file mode 100644 index 0000000000..0abf95d72d --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-deployment-repository.ts @@ -0,0 +1,395 @@ +import SQL from 'sql-template-strings'; +import { z } from 'zod'; +import { DeploymentRecord } from '../../database-models/deployment'; +import { getKnex } from 
'../../database/db'; +import { ApiExecuteSQLError } from '../../errors/api-error'; +import { IDeploymentAdvancedFilters } from '../../models/deployment-view'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { BaseRepository } from '../base-repository'; +import { + CreateDeployment, + ExtendedDeploymentRecord, + UpdateDeployment +} from './telemetry-deployment-repository.interface'; + +/** + * Repository class for working with deployments. + * + * @export + * @class TelemetryDeploymentRepository + * @extends {BaseRepository} + */ +export class TelemetryDeploymentRepository extends BaseRepository { + /** + * Create a deployment. + * + * @param {CreateDeployment} deployment The deployment data to create + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async createDeployment(deployment: CreateDeployment): Promise { + const sqlStatement = SQL` + INSERT INTO deployment ( + survey_id, + critter_id, + device_id, + frequency, + frequency_unit_id, + attachment_start_date, + attachment_start_time, + attachment_end_date, + attachment_end_time, + critterbase_start_capture_id, + critterbase_end_capture_id, + critterbase_end_mortality_id + ) VALUES ( + ${deployment.survey_id}, + ${deployment.critter_id}, + ${deployment.device_id}, + ${deployment.frequency}, + ${deployment.frequency_unit_id}, + ${deployment.attachment_start_date}, + ${deployment.attachment_start_time}, + ${deployment.attachment_end_date}, + ${deployment.attachment_end_time}, + ${deployment.critterbase_start_capture_id}, + ${deployment.critterbase_end_capture_id}, + ${deployment.critterbase_end_mortality_id} + ); + `; + + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to create deployment', [ + 'TelemetryDeploymentRepository->createDeployment', + 'rowCount was != 1, expected rowCount = 1' + ]); + } + } + + /** + * Retrieves the paginated list of deployments under a survey, 
based on the provided filter params. + * + * @param {number} surveyId + * @param {number[]} [deploymentIds] + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async getDeploymentsForSurvey( + surveyId: number, + deploymentIds?: number[], + pagination?: ApiPaginationOptions + ): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .select( + // deployment data + 'deployment.deployment_id', + 'deployment.survey_id', + 'deployment.critter_id', + 'deployment.device_id', + 'deployment.device_key', + 'deployment.frequency', + 'deployment.frequency_unit_id', + 'deployment.attachment_start_date', + 'deployment.attachment_start_time', + 'deployment.attachment_start_timestamp', + 'deployment.attachment_end_date', + 'deployment.attachment_end_time', + 'deployment.attachment_end_timestamp', + 'deployment.critterbase_start_capture_id', + 'deployment.critterbase_end_capture_id', + 'deployment.critterbase_end_mortality_id', + // device data + 'device.serial', + 'device.device_make_id', + 'device.model', + // critter data + 'critter.critterbase_critter_id' + ) + .from('deployment') + .innerJoin('survey', 'deployment.survey_id', 'survey.survey_id') + .innerJoin('device', 'deployment.device_id', 'device.device_id') + .innerJoin('critter', 'deployment.critter_id', 'critter.critter_id') + .where('deployment.survey_id', surveyId); + + if (deploymentIds?.length) { + // Filter results by deployment IDs + queryBuilder.whereIn('deployment.deployment_id', deploymentIds); + } + + if (pagination) { + queryBuilder.limit(pagination.limit).offset((pagination.page - 1) * pagination.limit); + + if (pagination.sort && pagination.order) { + queryBuilder.orderBy(pagination.sort, pagination.order); + } + } + + const response = await this.connection.knex(queryBuilder, ExtendedDeploymentRecord); + + return response.rows; + } + + /** + * Retrieves the paginated list of all deployments that are 
available to the user, based on their permissions and + * provided filter criteria. + * + * @param {boolean} isUserAdmin Whether the user making the request is an admin + * @param {(number | null)} systemUserId The system user id of the user making the request + * @param {IDeploymentAdvancedFilters} filterFields The filter fields to apply + * @param {ApiPaginationOptions} [pagination] The pagination/sorting options to apply + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async findDeployments( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IDeploymentAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + const knex = getKnex(); + + const getSurveyIdsQuery = knex.select(['survey_id']).from('survey'); + + // Ensure that users can only see observations that they are participating in, unless they are an administrator. + if (!isUserAdmin) { + getSurveyIdsQuery.whereIn('survey.project_id', (subqueryBuilder) => + subqueryBuilder + .select('project.project_id') + .from('project') + .leftJoin('project_participation', 'project_participation.project_id', 'project.project_id') + .where('project_participation.system_user_id', systemUserId) + ); + } + + if (filterFields.system_user_id) { + getSurveyIdsQuery.whereIn('p.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + const queryBuilder = knex + .queryBuilder() + .select( + // deployment data + 'deployment.deployment_id', + 'deployment.survey_id', + 'deployment.critter_id', + 'deployment.device_id', + 'deployment.device_key', + 'deployment.frequency', + 'deployment.frequency_unit_id', + 'deployment.attachment_start_date', + 'deployment.attachment_start_time', + 'deployment.attachment_start_timestamp', + 'deployment.attachment_end_date', + 'deployment.attachment_end_time', + 'deployment.attachment_end_timestamp', + 
'deployment.critterbase_start_capture_id', + 'deployment.critterbase_end_capture_id', + 'deployment.critterbase_end_mortality_id', + // device data + 'device.serial', + 'device.device_make_id', + 'device.model', + // critter data + 'critter.critterbase_critter_id' + ) + .from('deployment') + .innerJoin('survey', 'deployment.survey_id', 'survey.survey_id') + .innerJoin('device', 'deployment.device_id', 'device.device_id') + .innerJoin('critter', 'deployment.critter_id', 'critter.critter_id') + .whereIn('deployment.survey_id', getSurveyIdsQuery); + + if (filterFields.survey_ids?.length) { + // Filter results by survey IDs + queryBuilder.whereIn('survey.survey_id', filterFields.survey_ids); + } + + if (filterFields.deployment_ids?.length) { + // Filter results by deployment IDs + queryBuilder.whereIn('deployment.deployment_id', filterFields.deployment_ids); + } + + if (filterFields.system_user_id) { + // If a system user ID is provided, filter results by the projects/surveys that user has access to + queryBuilder.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + if (pagination) { + queryBuilder.limit(pagination.limit).offset((pagination.page - 1) * pagination.limit); + + if (pagination.sort && pagination.order) { + queryBuilder.orderBy(pagination.sort, pagination.order); + } + } + + const response = await this.connection.knex(queryBuilder, ExtendedDeploymentRecord); + + return response.rows; + } + + /** + * Get deployments for a critter ID. 
+ * + * @param {number} surveyId The survey ID + * @param {number} critterId The critter ID + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async getDeploymentsForCritterId(surveyId: number, critterId: number): Promise { + const sqlStatement = SQL` + SELECT + deployment_id, + survey_id, + critter_id, + device_id, + device_key, + frequency, + frequency_unit_id, + attachment_start_date, + attachment_start_time, + attachment_start_timestamp, + attachment_end_date, + attachment_end_time, + attachment_end_timestamp, + critterbase_start_capture_id, + critterbase_end_capture_id, + critterbase_end_mortality_id + FROM + deployment + WHERE + critter_id = ${critterId} AND + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement, DeploymentRecord); + + return response.rows; + } + + /** + * Get the total count of all deployments for a survey. + * + * @param {number} surveyId + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async getDeploymentsCount(surveyId: number): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select(knex.raw('count(*)::integer as count')) + .from('deployment') + .where('survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder, z.object({ count: z.number() })); + + return response.rows[0].count; + } + /** + * Update a deployment. 
+ * + * @param {number} surveyId The survey ID + * @param {number} deployment_id The deployment ID + * @param {UpdateDeployment} deployment The deployment data to update + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async updateDeployment(surveyId: number, deployment_id: number, deployment: UpdateDeployment): Promise { + const sqlStatement = SQL` + UPDATE + deployment + SET + critter_id = ${deployment.critter_id}, + device_id = ${deployment.device_id}, + frequency = ${deployment.frequency}, + frequency_unit_id = ${deployment.frequency_unit_id}, + attachment_start_date = ${deployment.attachment_start_date}, + attachment_start_time = ${deployment.attachment_start_time}, + attachment_end_date = ${deployment.attachment_end_date}, + attachment_end_time = ${deployment.attachment_end_time}, + critterbase_start_capture_id = ${deployment.critterbase_start_capture_id}, + critterbase_end_capture_id = ${deployment.critterbase_end_capture_id}, + critterbase_end_mortality_id = ${deployment.critterbase_end_mortality_id} + WHERE + deployment_id = ${deployment_id} AND + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to update deployment', [ + 'TelemetryDeploymentRepository->updateDeployment', + 'rowCount was != 1, expected rowCount = 1' + ]); + } + } + + /** + * Delete a deployment. 
+ * + * @param {number} surveyId The survey ID + * @param {number} deploymentId The deployment ID + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async deleteDeployment(surveyId: number, deploymentId: number): Promise { + const sqlStatement = SQL` + DELETE FROM + deployment + WHERE + deployment_id = ${deploymentId} AND + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to delete deployment', [ + 'TelemetryDeploymentRepository->deleteDeployment', + 'rowCount was != 1, expected rowCount = 1' + ]); + } + } + + /** + * Delete multiple deployments. + * + * @param {number} surveyId The survey ID + * @param {number[]} deploymentIds The deployment IDs + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async deleteDeployments(surveyId: number, deploymentIds: number[]): Promise { + const queryBuilder = getKnex() + .queryBuilder() + .delete() + .from('deployment') + .whereIn('deployment_id', deploymentIds) + .andWhere('survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder); + + if (response.rowCount !== deploymentIds.length) { + throw new ApiExecuteSQLError('Failed to delete deployments', [ + 'TelemetryDeploymentRepository->deleteDeployment', + `rowCount was ${response.rowCount}, expected rowCount = ${deploymentIds.length}` + ]); + } + } +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-device-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-device-repository.interface.ts new file mode 100644 index 0000000000..f5b0893d33 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-device-repository.interface.ts @@ -0,0 +1,33 @@ +import { DeviceRecord } from '../../database-models/device'; + +/** + * Interface reflecting the telemetry device data required to create a new device + * + */ +export type 
CreateTelemetryDevice = Pick; + +/** + * Interface reflecting the telemetry device data required to update an existing device + * + */ +export type UpdateTelemetryDevice = Partial>; + +/** + * Interface for the advanced filters that can be applied to the find devices request. + * + */ +export type DeviceAdvancedFilters = { + /** + * The keyword to search for in the device model, comment, or serial number + */ + keyword?: string; + /** + * The system user id to filter devices by. + * + * Note: This is not the system user id of the user making the request, but the system user id of the user whose + * devices you want to return. + * + * @type {number} + */ + system_user_id?: number; +}; diff --git a/api/src/repositories/telemetry-repositories/telemetry-device-repository.test.ts b/api/src/repositories/telemetry-repositories/telemetry-device-repository.test.ts new file mode 100644 index 0000000000..6fe56a22c8 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-device-repository.test.ts @@ -0,0 +1,105 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { getMockDBConnection } from '../../__mocks__/db'; +import { TelemetryDeviceRepository } from './telemetry-device-repository'; + +chai.use(sinonChai); + +describe('TelemetryDeviceRepository', () => { + it('should construct', () => { + const mockDBConnection = getMockDBConnection(); + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + expect(telemetryDeviceRepository).to.be.instanceof(TelemetryDeviceRepository); + }); + + describe('getDevicesByIds', () => { + it('should get devices by IDs', async () => { + const mockRows = [{ device_id: 1 }]; + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: mockRows }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + const response = await telemetryDeviceRepository.getDevicesByIds(1, [1]); + 
expect(response).to.eql(mockRows); + }); + }); + + describe('findDeviceBySerial', () => { + it('should find a device with a given serial and make in the given survey', async () => { + const mockRow = { device_id: 1 }; + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: [mockRow] }) }); + + const mockSurvey = 1; + const mockSerial = 12345; + const mockDeviceMakeId = 5; + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + const response = await telemetryDeviceRepository.findDeviceBySerial(mockSurvey, mockSerial, mockDeviceMakeId); + expect(response).to.eql(mockRow); + }); + }); + + describe('deleteDevicesByIds', () => { + it('should delete devices by IDs', async () => { + const mockRows = [{ device_id: 1 }]; + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: mockRows }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + const response = await telemetryDeviceRepository.deleteDevicesByIds(1, [1]); + expect(response).to.eql(mockRows); + }); + }); + + describe('createDevice', () => { + it('should create a new device', async () => { + const mockRows = [{ device_id: 1 }]; + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: mockRows, rowCount: 1 }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + const response = await telemetryDeviceRepository.createDevice({ device_id: 1 } as any); + expect(response).to.eql({ device_id: 1 }); + }); + + it('should throw an error if unable to create a new device', async () => { + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: [], rowCount: 0 }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + try { + await telemetryDeviceRepository.createDevice({ device_id: 1 } as any); + expect.fail(); + } catch (err: any) { + 
expect(err.message).to.equal('Device was not created'); + } + }); + }); + + describe('updateDevice', () => { + it('should update an existing device', async () => { + const mockRows = [{ device_id: 1 }]; + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: mockRows, rowCount: 1 }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + const response = await telemetryDeviceRepository.updateDevice(1, 2, { comment: 1 } as any); + expect(response).to.eql({ device_id: 1 }); + }); + it('should throw an error if unable to update an existing device', async () => { + const mockDBConnection = getMockDBConnection({ knex: sinon.stub().resolves({ rows: [], rowCount: 0 }) }); + + const telemetryDeviceRepository = new TelemetryDeviceRepository(mockDBConnection); + + try { + await telemetryDeviceRepository.updateDevice(1, 2, { comment: 1 } as any); + expect.fail(); + } catch (err: any) { + expect(err.message).to.equal('Device was not updated'); + } + }); + }); +}); diff --git a/api/src/repositories/telemetry-repositories/telemetry-device-repository.ts b/api/src/repositories/telemetry-repositories/telemetry-device-repository.ts new file mode 100644 index 0000000000..ba4c6c3f95 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-device-repository.ts @@ -0,0 +1,216 @@ +import { z } from 'zod'; +import { DeviceRecord } from '../../database-models/device'; +import { getKnex } from '../../database/db'; +import { ApiExecuteSQLError } from '../../errors/api-error'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { BaseRepository } from '../base-repository'; +import { + CreateTelemetryDevice, + DeviceAdvancedFilters, + UpdateTelemetryDevice +} from './telemetry-device-repository.interface'; +import { makeFindDevicesQuery } from './telemetry-device-utils'; + +/** + * A repository class for accessing telemetry device data. 
+ * + * @export + * @class TelemetryDeviceRepository + * @extends {BaseRepository} + */ +export class TelemetryDeviceRepository extends BaseRepository { + /** + * Get a list of devices by their IDs. + * + * @param {surveyId} surveyId + * @param {number[]} deviceIds + * @returns {*} {Promise} + */ + async getDevicesByIds(surveyId: number, deviceIds: number[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select(['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment']) + .from('device') + .whereIn('device_id', deviceIds) + .andWhere('survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder, DeviceRecord); + + return response.rows; + } + + /** + * Finds a device by a given serial number and make in the given survey + * + * @param {surveyId} surveyId + * @param {number} serial + * @param {number} deviceMakeId + * @returns {*} {Promise} + */ + async findDeviceBySerial(surveyId: number, serial: number, deviceMakeId: number): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select(['device_id', 'survey_id', 'device_key', 'serial', 'device.device_make_id', 'model', 'comment']) + .from('device') + .join('device_make', 'device_make.device_make_id', 'device.device_make_id') + .where('serial', serial) + .andWhere('device.device_make_id', deviceMakeId) + .andWhere('survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder, DeviceRecord); + + return response.rows[0]; + } + + /** + * Retrieve the list of devices for a survey, based on pagination options. 
+ * + * @param {number} surveyId + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryDeviceRepository + */ + async getDevicesForSurvey(surveyId: number, pagination?: ApiPaginationOptions): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select(['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment']) + .from('device') + .where('survey_id', surveyId); + + if (pagination) { + queryBuilder.limit(pagination.limit).offset((pagination.page - 1) * pagination.limit); + + if (pagination.sort && pagination.order) { + queryBuilder.orderBy(pagination.sort, pagination.order); + } + } + + const response = await this.connection.knex(queryBuilder, DeviceRecord); + + return response.rows; + } + + /** + * Retrieve the list of devices that the user has access to, based on filters and pagination options. + * + * @param {boolean} isUserAdmin Whether the user is an admin. + * @param {number | null} systemUserId The user's ID. + * @param {DeviceAdvancedFilters} filterFields The filter fields to apply. + * @param {ApiPaginationOptions} [pagination] The pagination options. + * @return {Promise} A promise resolving to the list of devices. + */ + async findDevices( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: DeviceAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + const query = makeFindDevicesQuery(isUserAdmin, systemUserId, filterFields); + + if (pagination) { + query.limit(pagination.limit).offset((pagination.page - 1) * pagination.limit); + + if (pagination.sort && pagination.order) { + query.orderBy(pagination.sort, pagination.order); + } + } + + const response = await this.connection.knex(query, DeviceRecord); + + return response.rows; + } + + /** + * Get the total count of all devices for a survey. 
+ * + * @param {number} surveyId + * @return {*} {Promise} + * @memberof TelemetryDeviceRepository + */ + async getDevicesCount(surveyId: number): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select(knex.raw('count(*)::integer as count')) + .from('device') + .where('survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder, z.object({ count: z.number() })); + + return response.rows[0].count; + } + + /** + * Delete a list of devices by their IDs. + * + * @param {surveyId} surveyId + * @param {number[]} deviceIds + * @returns {*} {Promise>} + */ + async deleteDevicesByIds(surveyId: number, deviceIds: number[]): Promise> { + const knex = getKnex(); + + const queryBuilder = knex + .delete() + .from('device') + .whereIn('device_id', deviceIds) + .andWhere({ survey_id: surveyId }) + .returning(['device_id']); + + const response = await this.connection.knex(queryBuilder, z.object({ device_id: z.number() })); + + return response.rows; + } + + /** + * Create a new device record. + * + * @param {CreateTelemetryDevice} device + * @returns {*} {Promise} + */ + async createDevice(device: CreateTelemetryDevice): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .insert(device) + .into('device') + .returning(['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment']); + + const response = await this.connection.knex(queryBuilder, DeviceRecord); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Device was not created', ['TelemetryDeviceRepository -> createDevice']); + } + + return response.rows[0]; + } + + /** + * Update an existing device record. 
+ * + * @param {surveyId} surveyId + * @param {number} deviceId + * @param {UpdateTelemetryDevice} device + * @returns {*} {Promise} + */ + async updateDevice(surveyId: number, deviceId: number, device: UpdateTelemetryDevice): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .update(device) + .from('device') + .where({ device_id: deviceId, survey_id: surveyId }) + .returning(['device_id', 'survey_id', 'device_key', 'serial', 'device_make_id', 'model', 'comment']); + + const response = await this.connection.knex(queryBuilder, DeviceRecord); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Device was not updated', ['TelemetryDeviceRepository -> updateDevice']); + } + + return response.rows[0]; + } +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-device-utils.ts b/api/src/repositories/telemetry-repositories/telemetry-device-utils.ts new file mode 100644 index 0000000000..fa800869eb --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-device-utils.ts @@ -0,0 +1,74 @@ +import { Knex } from 'knex'; +import { getKnex } from '../../database/db'; +import { DeviceAdvancedFilters } from './telemetry-device-repository.interface'; + +/** + * Generate the devices list query based on user access and filters. + * + * @param {boolean} isUserAdmin + * @param {number | null} systemUserId The system user id of the user making the request + * @param {DeviceAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + */ +export function makeFindDevicesQuery( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: DeviceAdvancedFilters +): Knex.QueryBuilder { + const knex = getKnex(); + + const getSurveyIdsQuery = knex.select(['survey_id']).from('survey'); + + // Ensure that users can only see devices from projects that they are participating in, unless they are an administrator. 
+ if (!isUserAdmin) { + getSurveyIdsQuery.whereIn('survey.project_id', (subqueryBuilder) => + subqueryBuilder + .select('project.project_id') + .from('project') + .leftJoin('project_participation', 'project_participation.project_id', 'project.project_id') + .where('project_participation.system_user_id', systemUserId) + ); + } + + if (filterFields.system_user_id) { + // Fix: the surveys subquery selects from 'survey' with no 'p' alias, so the column + // reference must be 'survey.project_id' (matches the admin-filter branch above). + getSurveyIdsQuery.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + const getDevicesQuery = knex + .select([ + 'device.device_id', + 'device.survey_id', + 'device.device_key', + 'device.serial', + 'device.device_make_id', + 'device.model', + 'device.comment' + ]) + .from('device') + // Join device_make table to get device make name for use in the keyword search + .innerJoin('device_make', 'device.device_make_id', 'device_make.device_make_id') + .whereIn('device.survey_id', getSurveyIdsQuery); + + // Keyword Search filter + if (filterFields.keyword) { + const keywordMatch = `%${filterFields.keyword}%`; + getDevicesQuery.where((subQueryBuilder) => { + subQueryBuilder + .where('device.model', 'ilike', keywordMatch) + .orWhere('device.comment', 'ilike', keywordMatch) + .orWhere('device_make.name', 'ilike', keywordMatch); + + // If the keyword is a number, also match on device serial + if (!isNaN(Number(filterFields.keyword))) { + subQueryBuilder.orWhere('device.serial', Number(filterFields.keyword)); + } + }); + } + + return getDevicesQuery; +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.interface.ts new file mode 100644 index 0000000000..7e73d131a1 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.interface.ts @@ -0,0 +1,29 @@ +import { z } from 'zod'; +import { TelemetryLotekRecord } from
'../../database-models/telemetry_lotek'; + +/** + * Lotek API query parameters. + * + * @see https://webservice.lotek.com/API/Help + */ +export const LotekAPIQuery = z.object({ + deviceId: z.number(), // serial + dtStart: z.string().optional(), // start date + dtEnd: z.string().optional() // end date +}); + +export type LotekAPIQuery = z.infer; + +export const LotekTask = z.object({ + serial: z.number() // deviceId +}); + +export type LotekTask = z.infer; + +export const LotekPayload = TelemetryLotekRecord.omit({ + telemetry_lotek_id: true, + device_key: true, + geography: true +}); + +export type LotekPayload = z.infer; diff --git a/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.ts b/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.ts new file mode 100644 index 0000000000..50db4d8d49 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-lotek-repository.ts @@ -0,0 +1,57 @@ +import SQL from 'sql-template-strings'; +import { z } from 'zod'; +import { getKnex } from '../../database/db'; +import { BaseRepository } from '../base-repository'; +import { LotekPayload } from './telemetry-lotek-repository.interface'; + +/** + * A repository class for working with raw Lotek telemetry data. + * + * @export + * @class TelemetryLotekRepository + * @extends {BaseRepository} + */ +export class TelemetryLotekRepository extends BaseRepository { + /** + * Create multiple Lotek telemetry records. + * + * @param {LotekPayload[]} telemetry - The telemetry records to create. + * @returns {Promise} The number of telemetry records created. + */ + async createLotekTelemetry(telemetry: LotekPayload[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .insert(telemetry) + .into('telemetry_lotek') + .onConflict(['recdatetime', 'deviceid']) + .ignore(); + + const result = await this.connection.knex(queryBuilder); + + return result.rowCount ?? 
0; + } + + /** + * Get the device activity statistics for Lotek telemetry device. + * @returns {Promise<{ serial: number, telemetry_count: number, last_acquisition: string | null }[]>} The device activity statistics. + */ + async getDeviceActivityStatistics() { + const sqlStatement = SQL` + SELECT + deviceid as serial, + COUNT(*)::int AS telemetry_count, + MAX(recdatetime) as last_acquisition + FROM telemetry_lotek + GROUP BY serial; + `; + + const result = await this.connection.sql( + sqlStatement, + z.object({ serial: z.number(), telemetry_count: z.number(), last_acquisition: z.string().nullable() }) + ); + + return result.rows; + } +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-manual-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-manual-repository.interface.ts new file mode 100644 index 0000000000..d80d3d9a3c --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-manual-repository.interface.ts @@ -0,0 +1,10 @@ +import { TelemetryManualRecord } from '../../database-models/telemetry_manual'; + +/** + * Interface reflecting the telemetry manual data required to create a new manual telemetry record. 
+ * + */ +export type CreateManualTelemetry = Pick< + TelemetryManualRecord, + 'deployment_id' | 'latitude' | 'longitude' | 'acquisition_date' | 'transmission_date' +>; diff --git a/api/src/repositories/telemetry-repositories/telemetry-manual-repository.ts b/api/src/repositories/telemetry-repositories/telemetry-manual-repository.ts new file mode 100644 index 0000000000..1302d6e070 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-manual-repository.ts @@ -0,0 +1,109 @@ +import { TelemetryManualRecord } from '../../database-models/telemetry_manual'; +import { getKnex } from '../../database/db'; +import { ApiExecuteSQLError } from '../../errors/api-error'; +import { BaseRepository } from '../base-repository'; +import { CreateManualTelemetry } from './telemetry-manual-repository.interface'; + +/** + * A repository class for working with Manual telemetry data. + * + * @export + * @class TelemetryManualRepository + * @extends {BaseRepository} + */ +export class TelemetryManualRepository extends BaseRepository { + /** + * Get manual telemetry records by their IDs. + * + * @param {number} surveyId - The survey ID + * @param {string[]} telemetryManualIds - List of manual telemetry IDs + * @returns {Promise} + */ + async getManualTelemetryByIds(surveyId: number, telemetryManualIds: string[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .select('*') + .from('telemetry_manual') + .join('deployment', 'telemetry_manual.deployment_id', 'deployment.deployment_id') + .whereIn('telemetry_manual.telemetry_manual_id', telemetryManualIds) + .andWhere('deployment.survey_id', surveyId); + + const response = await this.connection.knex(queryBuilder); + + return response.rows; + } + /** + * Bulk create manual telemetry records. + * + * Note: Deployment IDs need to be pre-validated against the survey ID in the service. 
+ * + * @param {CreateManualTelemetry[]} telemetry - List of manual telemetry data to create + * @returns {Promise} + */ + async bulkCreateManualTelemetry(telemetry: CreateManualTelemetry[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex.insert(telemetry).into('telemetry_manual'); + + const response = await this.connection.knex(queryBuilder); + + if (response.rowCount !== telemetry.length) { + throw new ApiExecuteSQLError('Failed to create manual telemetry records', [ + 'TelemetryManualRepository->bulkCreateManualTelemetry', + `expected rowCount to be ${telemetry.length}, got ${response.rowCount}` + ]); + } + } + + /** + * Bulk update manual telemetry records. + * + * Note: Deployment IDs need to be pre-validated against the survey ID in the service. + * + * @param {TelemetryManualRecord[]} telemetry - List of Manual telemetry data to update + * @returns {Promise} + */ + async bulkUpdateManualTelemetry(telemetry: TelemetryManualRecord[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .insert(telemetry) + .into('telemetry_manual') + .onConflict('telemetry_manual_id') + // intentionally omitting the deployment_id + .merge(['latitude', 'longitude', 'acquisition_date', 'transmission_date']); + + const response = await this.connection.knex(queryBuilder); + + if (response.rowCount !== telemetry.length) { + throw new ApiExecuteSQLError('Failed to update manual telemetry records', [ + 'TelemetryManualRepository->bulkUpdateManualTelemetry', + `expected rowCount to be ${telemetry.length}, got ${response.rowCount}` + ]); + } + } + + /** + * Bulk delete manual telemetry records. + * + * Note: Deployment IDs need to be pre-validated against the survey ID in the service. 
+ * + * @param {string} telemetryManualIds - List of manual telemetry IDs + * @returns {Promise} + */ + async bulkDeleteManualTelemetry(telemetryManualIds: string[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex.delete().from('telemetry_manual').whereIn('telemetry_manual_id', telemetryManualIds); + + const response = await this.connection.knex(queryBuilder); + + if (response.rowCount !== telemetryManualIds.length) { + throw new ApiExecuteSQLError('Failed to delete manual telemetry records', [ + 'TelemetryManualRepository->bulkDeleteManualTelemetry', + `expected rowCount to be ${telemetryManualIds.length}, got ${response.rowCount}` + ]); + } + } +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.interface.ts new file mode 100644 index 0000000000..3e7d5797d0 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.interface.ts @@ -0,0 +1,25 @@ +import { z } from 'zod'; +import { TelemetryVectronicRecord } from '../../database-models/telemetry_vectronic'; + +/** + * Interface reflecting the vectronic data required to create a new vectronic telemetry record + * + */ +export type VectronicPayload = Omit; + +const VectronicAPIQuery = z.object({ + idcollar: z.number(), + collarkey: z.string(), + afterAcquisition: z.string().optional(), // Request data after or equal to this date + beforeAcquisition: z.string().optional(), // Request data before or equal to this date + gtId: z.number().optional() // gt-id Request data greater or equal than the isposition +}); + +export type VectronicAPIQuery = z.infer; + +const VectronicTask = z.object({ + serial: z.number(), // idcollar + key: z.string() // collarkey +}); + +export type VectronicTask = z.infer; diff --git a/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.ts 
b/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.ts new file mode 100644 index 0000000000..036ed99575 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-vectronic-repository.ts @@ -0,0 +1,72 @@ +import SQL from 'sql-template-strings'; +import { z } from 'zod'; +import { TelemetryCredentialVectronicRecord } from '../../database-models/telemetry_credential_vectronic'; +import { getKnex } from '../../database/db'; +import { BaseRepository } from '../base-repository'; +import { VectronicPayload } from './telemetry-vectronic-repository.interface'; + +/** + * A repository class for working with raw vectronic telemetry data. + * + * @export + * @class TelemetryVectronicRepository + * @extends {BaseRepository} + */ +export class TelemetryVectronicRepository extends BaseRepository { + async createVectronicTelemetry(telemetry: VectronicPayload[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .insert(telemetry) + .into('telemetry_vectronic') + .onConflict('idposition') + .ignore(); + + const result = await this.connection.knex(queryBuilder); + + return result.rowCount ?? 0; + } + + /** + * Get all Vectronic credentials. 
+ * + * @returns {*} {Promise} + */ + async getAllVectronicCredentials(): Promise { + const sqlStatement = SQL` + SELECT + telemetry_credential_vectronic_id, + device_key, + idcollar, + comtype, + idcom, + collarkey, + collartype + FROM telemetry_credential_vectronic; + `; + const result = await this.connection.sql(sqlStatement, TelemetryCredentialVectronicRecord); + + return result.rows; + } + + async getDeviceActivityStatistics() { + const sqlStatement = SQL` + SELECT + idcollar as serial, + COUNT(*)::int AS telemetry_count, + MAX(idposition) as max_idposition + FROM + telemetry_vectronic + GROUP BY + idcollar; + `; + + const result = await this.connection.sql( + sqlStatement, + z.object({ serial: z.number(), telemetry_count: z.number(), max_idposition: z.number().nullable() }) + ); + + return result.rows; + } +} diff --git a/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.interface.ts b/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.interface.ts new file mode 100644 index 0000000000..9910296126 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.interface.ts @@ -0,0 +1,62 @@ +import { z } from 'zod'; +import { GeoJSONPointZodSchema } from '../../zod-schema/geoJsonZodSchema'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; + +/** + * Telemetry vendor enumeration. + * + * Note: These values (except MANUAL) must match what exists in the `device_make` table + */ +export enum TelemetryVendorEnum { + /** + * Vectronic telemetry vendor. + * + * Note: Automatic data fetching nightly via Cronjob. + */ + VECTRONIC = 'vectronic', + /** + * Lotek telemetry vendor. + * + * Note: Automatic data fetching nightly via Cronjob. + */ + LOTEK = 'lotek', + /** + * ATS telemetry vendor. + * + * Note: Automatic data fetching deprecated. Data still available in database. + */ + ATS = 'ats', + /** + * Manual telemetry vendor. 
+ * + * Note: Telemetry that is manually added by users. + */ + MANUAL = 'manual' +} + +export const TelemetrySchema = z.object({ + telemetry_id: z.string(), // Vendor telemetry ID (Primary Key) + deployment_id: z.number(), // SIMS Deployment ID + critter_id: z.number(), // SIMS Critter ID + vendor: z.nativeEnum(TelemetryVendorEnum), // Telemetry vendor + serial: z.string(), // Telemetry device serial number + acquisition_date: z.string(), // Date telemetry was retrieved + latitude: z.number().nullable(), // Latitude of telemetry (Y axis) + longitude: z.number().nullable(), // Longitude of telemetry (X axis) + elevation: z.number().nullable(), // Elevation of telemetry in meters + temperature: z.number().nullable() // Temperature in Celsius +}); + +export type Telemetry = z.infer; + +export const TelemetrySpatialSchema = z.object({ + telemetry_id: z.string(), // Telemetry ID (Primary Key) + geometry: GeoJSONPointZodSchema.nullable() // GeoJSON Point +}); + +export type TelemetrySpatial = z.infer; + +export type TelemetryOptions = { + pagination?: ApiPaginationOptions; + dateRange?: { startDate?: string; endDate?: string }; +}; diff --git a/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.ts b/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.ts new file mode 100644 index 0000000000..29d7037ed9 --- /dev/null +++ b/api/src/repositories/telemetry-repositories/telemetry-vendor-repository.ts @@ -0,0 +1,999 @@ +import { Knex } from 'knex'; +import { z } from 'zod'; +import { getKnex } from '../../database/db'; +import { ApiExecuteSQLError } from '../../errors/api-error'; +import { IAllTelemetryAdvancedFilters } from '../../models/telemetry-view'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { BaseRepository } from '../base-repository'; +import { + Telemetry, + TelemetryOptions, + TelemetrySchema, + TelemetrySpatial, + TelemetrySpatialSchema, + TelemetryVendorEnum +} from 
'./telemetry-vendor-repository.interface'; + +/** + * A repository class for working with telemetry vendor data. + * + * @export + * @class TelemetryVendorRepository + * @extends {BaseRepository} + */ +export class TelemetryVendorRepository extends BaseRepository { + /** + * Get normalized `Lotek` telemetry base query. + * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @returns {Knex.QueryBuilder} + */ + getLotekTelemetryBaseQuery(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder + .select( + 'telemetry_lotek.telemetry_lotek_id as telemetry_id', + 'deployment.deployment_id as deployment_id', + 'deployment.critter_id as critter_id', + knex.raw(`'${TelemetryVendorEnum.LOTEK}' as vendor`), + knex.raw('telemetry_lotek.deviceid::text as serial'), + knex.raw('telemetry_lotek.recdatetime as acquisition_date'), + 'telemetry_lotek.latitude', + 'telemetry_lotek.longitude', + 'telemetry_lotek.altitude as elevation', + 'telemetry_lotek.temperature' + ) + .from('telemetry_lotek'); + } + + /** + * Add where clause to filter `Lotek` telemetry data by device attachment date range. + * + * Note: Joins the `deployment` table. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} startDate + * @param {string} endDate + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getLotekTelemetryByAttachmentDateRangeClause(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + return queryBuilder + .join('deployment', 'telemetry_lotek.device_key', 'deployment.device_key') + .andWhereRaw('telemetry_lotek.recdatetime >= deployment.attachment_start_timestamp') + .andWhere((qb) => + qb + .orWhereRaw('telemetry_lotek.recdatetime <= deployment.attachment_end_timestamp') + .orWhereRaw('deployment.attachment_end_timestamp IS NULL') + ); + } + + /** + * Add where clause to filter `Lotek` telemetry data by a survey ID. 
+ * + * TODO: Add check for credentials (same method or different method?) + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {number} surveyId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getLotekTelemetryBySurveyIdClause(queryBuilder: Knex.QueryBuilder, surveyId: number): Knex.QueryBuilder { + return queryBuilder.andWhere('deployment.survey_id', surveyId); + } + + /** + * Add where clause to filter `Lotek` telemetry data by a list of deployment IDs. + * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @param {number[]} deploymentIds + * @returns {Knex.QueryBuilder} + */ + getLotekTelemetryByDeploymentIdsClause(queryBuilder: Knex.QueryBuilder, deploymentIds: number[]): Knex.QueryBuilder { + return queryBuilder.whereIn('deployment.deployment_id', deploymentIds); + } + + /** + * Add where clause to filter `Lotek` telemetry data by a telemetry ID. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} telemetryId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getLotekTelemetryByTelemetryIdClause(queryBuilder: Knex.QueryBuilder, telemetryId: string): Knex.QueryBuilder { + return queryBuilder.andWhere('telemetry_lotek.telemetry_lotek_id', telemetryId); + } + + /** + * Find `Lotek` telemetry data records the user has access to, based on filters and pagination options. + * + * TODO: Add check for credentials (same method or different method?) 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findLotekTelemetryClause( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + queryBuilder.join('survey', 'deployment.survey_id', 'survey.survey_id'); + + if (!isUserAdmin) { + // If the user is not an admin, filter results by the projects/surveys they have access to + queryBuilder + .join('project_participation', 'survey.project_id', 'project_participation.project_id') + .where('project_participation.system_user_id', systemUserId); + } + + if (filterFields.keyword) { + // Keyword Search filter + const keywordMatch = `%${filterFields.keyword}%`; + queryBuilder.where((subQueryBuilder) => { + subQueryBuilder + .where(knex.raw(`'${TelemetryVendorEnum.LOTEK}'`), 'ilike', keywordMatch) + .orWhere(knex.raw('telemetry_lotek.deviceid::text'), 'ilike', keywordMatch); + }); + } + + if (filterFields.start_date) { + queryBuilder.where('telemetry_lotek.recdatetime', '>=', filterFields.start_date); + } + + if (filterFields.end_date) { + queryBuilder.where('telemetry_lotek.recdatetime', '<=', filterFields.end_date); + } + + if (filterFields.system_user_id) { + // If a system user ID is provided, filter results by the projects/surveys that user has access to + queryBuilder.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + return queryBuilder; + } + + /** + * Get normalized `Vectronic` telemetry base query. 
+ * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @returns {Knex.QueryBuilder} + */ + getVectronicTelemetryBaseQuery(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder + .select( + 'telemetry_vectronic.telemetry_vectronic_id as telemetry_id', + 'deployment.deployment_id as deployment_id', + 'deployment.critter_id as critter_id', + knex.raw(`'${TelemetryVendorEnum.VECTRONIC}' as vendor`), + knex.raw('telemetry_vectronic.idcollar::text as serial'), + knex.raw('telemetry_vectronic.acquisitiontime as acquisition_date'), + 'telemetry_vectronic.latitude', + 'telemetry_vectronic.longitude', + 'telemetry_vectronic.height as elevation', + 'telemetry_vectronic.temperature' + ) + .from('telemetry_vectronic'); + } + + /** + * Add where clause to filter `Vectronic` telemetry data by device attachment date range. + * + * Note: Joins the `deployment` table. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} startDate + * @param {string} endDate + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getVectronicTelemetryByAttachmentDateRangeClause(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + return queryBuilder + .join('deployment', 'telemetry_vectronic.device_key', 'deployment.device_key') + .andWhereRaw('telemetry_vectronic.acquisitiontime >= deployment.attachment_start_timestamp') + .andWhere((qb) => + qb + .orWhereRaw('telemetry_vectronic.acquisitiontime <= deployment.attachment_end_timestamp') + .orWhereRaw('deployment.attachment_end_timestamp IS NULL') + ); + } + + /** + * Get normalized `Vectronic` telemetry data for a survey ID. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {number} surveyId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getVectronicTelemetryBySurveyIdClause(queryBuilder: Knex.QueryBuilder, surveyId: number): Knex.QueryBuilder { + return queryBuilder.andWhere('deployment.survey_id', surveyId); + } + + /** + * Add where clause to filter `Vectronic` telemetry data by a list of deployment IDs. + * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @param {number[]} deploymentIds + * @returns {Knex.QueryBuilder} + */ + getVectronicTelemetryByDeploymentIdsClause( + queryBuilder: Knex.QueryBuilder, + deploymentIds: number[] + ): Knex.QueryBuilder { + return queryBuilder.whereIn('deployment.deployment_id', deploymentIds); + } + + /** + * Add where clause to filter `Vectronic` telemetry data by a telemetry ID. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} telemetryId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getVectronicTelemetryByTelemetryIdClause(queryBuilder: Knex.QueryBuilder, telemetryId: string): Knex.QueryBuilder { + return queryBuilder.andWhere('telemetry_vectronic.telemetry_vectronic_id', telemetryId); + } + + /** + * Find `Vectronic` telemetry data records the user has access to, based on filters and pagination options. + * + * TODO: Add check for credentials (same method or different method?) 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findVectronicTelemetryClause( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + queryBuilder.join('survey', 'deployment.survey_id', 'survey.survey_id'); + + if (!isUserAdmin) { + // If the user is not an admin, filter results by the projects/surveys they have access to + queryBuilder + .join('project_participation', 'survey.project_id', 'project_participation.project_id') + .where('project_participation.system_user_id', systemUserId); + } + + if (filterFields.keyword) { + // Keyword Search filter + const keywordMatch = `%${filterFields.keyword}%`; + queryBuilder.where((subQueryBuilder) => { + subQueryBuilder + .where(knex.raw(`'${TelemetryVendorEnum.VECTRONIC}'`), 'ilike', keywordMatch) + .orWhere(knex.raw('telemetry_vectronic.idcollar::text'), 'ilike', keywordMatch); + }); + } + + if (filterFields.start_date) { + queryBuilder.where('telemetry_vectronic.acquisitiontime', '>=', filterFields.start_date); + } + + if (filterFields.end_date) { + queryBuilder.where('telemetry_vectronic.acquisitiontime', '<=', filterFields.end_date); + } + + if (filterFields.system_user_id) { + // If a system user ID is provided, filter results by the projects/surveys that user has access to + queryBuilder.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + return queryBuilder; + } + + /** + * Get normalized `ATS` telemetry base query. 
+ * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @returns {Knex.QueryBuilder} + */ + getATSTelemetryBaseQuery(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder + .select( + 'telemetry_ats.telemetry_ats_id as telemetry_id', + 'deployment.deployment_id as deployment_id', + 'deployment.critter_id as critter_id', + knex.raw(`'${TelemetryVendorEnum.ATS}' as vendor`), + knex.raw('telemetry_ats.collarserialnumber::text as serial'), + 'telemetry_ats.date as acquisition_date', + 'telemetry_ats.latitude', + 'telemetry_ats.longitude', + knex.raw('NULL as elevation'), + knex.raw('telemetry_ats.temperature::float') + ) + .from('telemetry_ats'); + } + + /** + * Add where clause to filter `ATS` telemetry data by device attachment date range. + * + * Note: Joins the `deployment` table. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} startDate + * @param {string} endDate + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getATSTelemetryByAttachmentDateRangeClause(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + return queryBuilder + .join('deployment', 'telemetry_ats.device_key', 'deployment.device_key') + .andWhereRaw('telemetry_ats.date >= deployment.attachment_start_timestamp') + .andWhere((qb) => + qb + .orWhereRaw('telemetry_ats.date <= deployment.attachment_end_timestamp') + .orWhereRaw('deployment.attachment_end_timestamp IS NULL') + ); + } + + /** + * Add where clause to filter `ATS` telemetry data by device attachment date range. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {number} surveyId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getATSTelemetryBySurveyIdClause(queryBuilder: Knex.QueryBuilder, surveyId: number): Knex.QueryBuilder { + return queryBuilder.andWhere('deployment.survey_id', surveyId); + } + + /** + * Add where clause to filter `ATS` telemetry data by a telemetry ID. + * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @param {number[]} deploymentIds + * @returns {Knex.QueryBuilder} + */ + getATSTelemetryByDeploymentIdsClause(queryBuilder: Knex.QueryBuilder, deploymentIds: number[]): Knex.QueryBuilder { + return queryBuilder.whereIn('deployment.deployment_id', deploymentIds); + } + + /** + * Get normalized `ATS` telemetry data for a single telemetry ID. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} telemetryId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getATSTelemetryByTelemetryIdClause(queryBuilder: Knex.QueryBuilder, telemetryId: string): Knex.QueryBuilder { + return queryBuilder.andWhere('telemetry_ats.telemetry_ats_id', telemetryId); + } + + /** + * Find `ATS` telemetry data records the user has access to, based on filters and pagination options. + * + * TODO: Add check for credentials (same method or different method?) 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findATSTelemetryClause( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + queryBuilder.join('survey', 'deployment.survey_id', 'survey.survey_id'); + + if (!isUserAdmin) { + // If the user is not an admin, filter results by the projects/surveys they have access to + queryBuilder + .join('project_participation', 'survey.project_id', 'project_participation.project_id') + .where('project_participation.system_user_id', systemUserId); + } + + if (filterFields.keyword) { + // Keyword Search filter + const keywordMatch = `%${filterFields.keyword}%`; + queryBuilder.where((subQueryBuilder) => { + subQueryBuilder + .where(knex.raw(`'${TelemetryVendorEnum.ATS}'`), 'ilike', keywordMatch) + .orWhere(knex.raw('telemetry_ats.collarserialnumber::text'), 'ilike', keywordMatch); + }); + } + + if (filterFields.start_date) { + queryBuilder.where('telemetry_ats.date', '>=', filterFields.start_date); + } + + if (filterFields.end_date) { + queryBuilder.where('telemetry_ats.date', '<=', filterFields.end_date); + } + + if (filterFields.system_user_id) { + // If a system user ID is provided, filter results by the projects/surveys that user has access to + queryBuilder.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + return queryBuilder; + } + + /** + * Get normalized `Manual` telemetry base query. + * + * Note: Joins the `deployment`, `device` tables. 
+ * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @returns {Knex.QueryBuilder} + */ + getManualTelemetryBaseQuery(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder + .select( + 'telemetry_manual.telemetry_manual_id as telemetry_id', + 'telemetry_manual.deployment_id as deployment_id', + 'deployment.critter_id as critter_id', + knex.raw(`'${TelemetryVendorEnum.MANUAL}' as vendor`), + 'device.serial', + 'telemetry_manual.acquisition_date', + 'telemetry_manual.latitude', + 'telemetry_manual.longitude', + knex.raw('NULL as elevation'), + knex.raw('NULL as temperature') + ) + .from('telemetry_manual') + .join('deployment', 'telemetry_manual.deployment_id', 'deployment.deployment_id') + .join('device', 'deployment.device_id', 'device.device_id'); + } + + /** + * Add where clause to filter `Manual` telemetry data by device attachment date range. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {number} surveyId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getManualTelemetryBySurveyIdClause(queryBuilder: Knex.QueryBuilder, surveyId: number): Knex.QueryBuilder { + return queryBuilder.andWhere('deployment.survey_id', surveyId); + } + + /** + * Add where clause to filter `Manual` telemetry data by device attachment date range. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} startDate + * @param {string} endDate + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getManualTelemetryByAttachmentDateRangeClause(queryBuilder: Knex.QueryBuilder): Knex.QueryBuilder { + return queryBuilder + .andWhereRaw('telemetry_manual.acquisition_date >= deployment.attachment_start_timestamp') + .andWhere((qb) => + qb + .orWhereRaw('telemetry_manual.acquisition_date <= deployment.attachment_end_timestamp') + .orWhereRaw('deployment.attachment_end_timestamp IS NULL') + ); + } + + /** + * Add where clause to filter `Manual` telemetry data by a list of deployment IDs. + * + * @see TelemetrySchema ./telemetry-vendor-repository.interface.ts + * @param {Knex.QueryBuilder} queryBuilder + * @param {number[]} deploymentIds + * @returns {Knex.QueryBuilder} + */ + getManualTelemetryByDeploymentIdsClause(queryBuilder: Knex.QueryBuilder, deploymentIds: number[]): Knex.QueryBuilder { + return queryBuilder.whereIn('deployment.deployment_id', deploymentIds); + } + + /** + * Add where clause to filter `Manual` telemetry data by a telemetry ID. + * + * @param {Knex.QueryBuilder} queryBuilder + * @param {string} telemetryId + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getManualTelemetryByTelemetryIdClause(queryBuilder: Knex.QueryBuilder, telemetryId: string): Knex.QueryBuilder { + return queryBuilder.andWhere('telemetry_manual.telemetry_manual_id', telemetryId); + } + + /** + * Find `Manual` telemetry data records the user has access to, based on filters and pagination options. + * + * TODO: Add check for credentials (same method or different method?) 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findManualTelemetryClause( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + queryBuilder.join('survey', 'deployment.survey_id', 'survey.survey_id'); + + if (!isUserAdmin) { + // If the user is not an admin, filter results by the projects/surveys they have access to + queryBuilder + .join('project_participation', 'survey.project_id', 'project_participation.project_id') + .where('project_participation.system_user_id', systemUserId); + } + + if (filterFields.keyword) { + // Keyword Search filter + const keywordMatch = `%${filterFields.keyword}%`; + queryBuilder.where((subQueryBuilder) => { + subQueryBuilder.where(knex.raw(`'${TelemetryVendorEnum.MANUAL}'`), 'ilike', keywordMatch); + }); + } + + if (filterFields.start_date) { + queryBuilder.where('telemetry_manual.acquisition_date', '>=', filterFields.start_date); + } + + if (filterFields.end_date) { + queryBuilder.where('telemetry_manual.acquisition_date', '<=', filterFields.end_date); + } + + if (filterFields.system_user_id) { + // If the user is not an admin, filter results by the projects/surveys they have access to + queryBuilder.whereIn('survey.project_id', (subQueryBuilder) => { + subQueryBuilder + .select('project_id') + .from('project_participation') + .where('system_user_id', filterFields.system_user_id); + }); + } + + return queryBuilder; + } + + /** + * Get normalized telemetry data for all vendors for list of deployment IDs. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {number} surveyId + * @param {number[]} deploymentIds + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + getTelemetryByDeploymentIdsBaseQuery( + queryBuilder: Knex.QueryBuilder, + surveyId: number, + deploymentIds: number[] + ): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder.unionAll([ + /** + * LOTEK Telemetry + */ + this.getLotekTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getLotekTelemetryByAttachmentDateRangeClause) + .modify(this.getLotekTelemetryBySurveyIdClause, surveyId) + .modify(this.getLotekTelemetryByDeploymentIdsClause, deploymentIds), + /** + * VECTRONIC Telemetry + */ + this.getVectronicTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getVectronicTelemetryByAttachmentDateRangeClause) + .modify(this.getVectronicTelemetryBySurveyIdClause, surveyId) + .modify(this.getVectronicTelemetryByDeploymentIdsClause, deploymentIds), + /** + * ATS Telemetry + */ + this.getATSTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getATSTelemetryByAttachmentDateRangeClause) + .modify(this.getATSTelemetryBySurveyIdClause, surveyId) + .modify(this.getATSTelemetryByDeploymentIdsClause, deploymentIds), + /** + * MANUAL Telemetry + */ + this.getManualTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getManualTelemetryByAttachmentDateRangeClause) + .modify(this.getManualTelemetryBySurveyIdClause, surveyId) + .modify(this.getManualTelemetryByDeploymentIdsClause, deploymentIds) + ]); + } + + /** + * Get all telemetry data for list of deployment IDs. + * + * Note: Currently supports, `Lotek`, `Vectronic`, `ATS`, and `Manual` telemetry. 
+ * + * @param {number} surveyId + * @param {number[]} deploymentIds + * @param {TelemetryOptions} [options] - Telemetry request options + * @returns {Promise} + */ + async getTelemetryByDeploymentIds( + surveyId: number, + deploymentIds: number[], + options?: TelemetryOptions + ): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('telemetry', (qb) => { + this.getTelemetryByDeploymentIdsBaseQuery(qb, surveyId, deploymentIds); + }) + .select('*') + .from('telemetry'); + + // Inject date range if provided + if (options?.dateRange) { + if (options.dateRange.startDate) { + queryBuilder.where('telemetry.acquisition_date', '>=', options.dateRange.startDate); + } + + if (options.dateRange.endDate) { + queryBuilder.where('telemetry.acquisition_date', '<=', options.dateRange.endDate); + } + } + + // Inject pagination / sorting if provided + if (options?.pagination) { + queryBuilder.limit(options.pagination.limit).offset((options.pagination.page - 1) * options.pagination.limit); + + if (options.pagination.sort && options.pagination.order) { + if (options.pagination.sort === 'acquisition_time') { + // Allow sorting by acquisition_time, which is not a real database column + queryBuilder.orderByRaw(knex.raw(`acquisition_date::time ${options.pagination.order}`)); + } else { + queryBuilder.orderBy(options.pagination.sort, options.pagination.order); + } + } + } + + const response = await this.connection.knex(queryBuilder, TelemetrySchema); + + return response.rows; + } + + /** + * Get all telemetry spatial data for list of deployment IDs. + * + * Note: Currently supports, `Lotek`, `Vectronic`, `ATS`, and `Manual` telemetry. 
+ * + * @param {number} surveyId + * @param {number[]} deploymentIds + * @returns {Promise} + */ + async getTelemetrySpatialByDeploymentIds(surveyId: number, deploymentIds: number[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex.queryBuilder(); + + queryBuilder + .with('telemetry', (qb) => { + this.getTelemetryByDeploymentIdsBaseQuery(qb, surveyId, deploymentIds); + }) + .select( + 'telemetry.telemetry_id', + knex.raw(` + CASE WHEN telemetry.longitude IS NULL OR telemetry.latitude IS NULL THEN NULL + ELSE JSON_BUILD_OBJECT('type', 'Point', 'coordinates', JSON_BUILD_ARRAY(telemetry.longitude, telemetry.latitude)) + END as geometry + `) + ) + .from('telemetry'); + + const response = await this.connection.knex(queryBuilder, TelemetrySpatialSchema); + + return response.rows; + } + + /** + * Get the total count of all telemetry records for list of deployment IDs. + * + * Note: Currently supports, `Lotek`, `Vectronic`, `ATS`, and `Manual` telemetry. + * + * @param {number} surveyId + * @param {number[]} deploymentIds + * @return {*} {Promise} + * @memberof TelemetryVendorRepository + */ + async getTelemetryCountByDeploymentIds(surveyId: number, deploymentIds: number[]): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('telemetry', (qb) => { + this.getTelemetryByDeploymentIdsBaseQuery(qb, surveyId, deploymentIds); + }) + .select(knex.raw('count(*)::integer as count')) + .from('telemetry'); + + const response = await this.connection.knex(queryBuilder, z.object({ count: z.number() })); + + return response.rows[0].count; + } + + /** + * Get telemetry record by telemetry ID. 
+ * + * @param {number} surveyId + * @param {string} telemetryId + * @return {*} {Promise} + * @memberof TelemetryVendorRepository + */ + async getTelemetryRecordById(surveyId: number, telemetryId: string): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('telemetry', (withQueryBuilder) => { + withQueryBuilder.unionAll([ + /** + * LOTEK Telemetry + */ + this.getLotekTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getLotekTelemetryByAttachmentDateRangeClause) + .modify(this.getLotekTelemetryBySurveyIdClause, surveyId) + .modify(this.getLotekTelemetryByTelemetryIdClause, telemetryId), + /** + * VECTRONIC Telemetry + */ + this.getVectronicTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getVectronicTelemetryByAttachmentDateRangeClause) + .modify(this.getVectronicTelemetryBySurveyIdClause, surveyId) + .modify(this.getVectronicTelemetryByTelemetryIdClause, telemetryId), + /** + * ATS Telemetry + */ + this.getATSTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getATSTelemetryByAttachmentDateRangeClause) + .modify(this.getATSTelemetryBySurveyIdClause, surveyId) + .modify(this.getATSTelemetryByTelemetryIdClause, telemetryId), + /** + * MANUAL Telemetry + */ + this.getManualTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getManualTelemetryByAttachmentDateRangeClause) + .modify(this.getManualTelemetryBySurveyIdClause, surveyId) + .modify(this.getManualTelemetryByTelemetryIdClause, telemetryId) + ]); + }) + .select('*') + .from('telemetry'); + + const response = await this.connection.knex(queryBuilder, TelemetrySchema); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to get telemetry record', [ + 'TelemetryVendorRepository->getTelemetryRecordById', + 'rowCount was != 1, expected rowCount = 1' + ]); + } + + return response.rows[0]; + } + + /** + * Get normalized telemetry for all telemetry records the user has access to, based on filters and pagination options. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findTelemetryBaseQuery( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder.unionAll([ + /** + * LOTEK Telemetry + */ + this.getLotekTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getLotekTelemetryByAttachmentDateRangeClause) + .modify(this.findLotekTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * VECTRONIC Telemetry + */ + this.getVectronicTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getVectronicTelemetryByAttachmentDateRangeClause) + .modify(this.findVectronicTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * ATS Telemetry + */ + this.getATSTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getATSTelemetryByAttachmentDateRangeClause) + .modify(this.findATSTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * MANUAL Telemetry + */ + this.getManualTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getManualTelemetryByAttachmentDateRangeClause) + .modify(this.findManualTelemetryClause, isUserAdmin, systemUserId, filterFields) + ]); + } + + /** + * Retrieves the paginated list of all surveys that are available to the user. 
+ * + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId The system user id of the user making the request + * @param {IAllTelemetryAdvancedFilters} filterFields + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryVendorRepository + */ + async findTelemetry( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('telemetry', (qb) => { + this.findTelemetryBaseQuery(qb, isUserAdmin, systemUserId, filterFields); + }) + .select('*') + .from('telemetry'); + + // Inject pagination / sorting if provided + if (pagination) { + queryBuilder.limit(pagination.limit).offset((pagination.page - 1) * pagination.limit); + + if (pagination.sort && pagination.order) { + if (pagination.sort === 'acquisition_time') { + // Allow sorting by acquisition_time, which is not a real database column + queryBuilder.orderByRaw(knex.raw(`acquisition_date::time ${pagination.order}`)); + } else { + queryBuilder.orderBy(pagination.sort, pagination.order); + } + } + } + + const response = await this.connection.knex(queryBuilder, TelemetrySchema); + + return response.rows; + } + + /** + * Get telemetry count for all telemetry records the user has access to, based on filters and pagination options. 
+ * + * @param {Knex.QueryBuilder} queryBuilder + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Knex.QueryBuilder} + * @memberof TelemetryVendorRepository + */ + findTelemetryCountBaseQuery( + queryBuilder: Knex.QueryBuilder, + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Knex.QueryBuilder { + const knex = getKnex(); + + return queryBuilder.unionAll([ + /** + * LOTEK Telemetry + */ + this.getLotekTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getLotekTelemetryByAttachmentDateRangeClause) + .modify(this.findLotekTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * VECTRONIC Telemetry + */ + this.getVectronicTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getVectronicTelemetryByAttachmentDateRangeClause) + .modify(this.findVectronicTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * ATS Telemetry + */ + this.getATSTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getATSTelemetryByAttachmentDateRangeClause) + .modify(this.findATSTelemetryClause, isUserAdmin, systemUserId, filterFields), + /** + * MANUAL Telemetry + */ + this.getManualTelemetryBaseQuery(knex.queryBuilder()) + .modify(this.getManualTelemetryByAttachmentDateRangeClause) + .modify(this.findManualTelemetryClause, isUserAdmin, systemUserId, filterFields) + ]); + } + + /** + * Returns the total number of surveys that the user has access to + * + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {ISurveyAdvancedFilters} filterFields + * @return {*} {Promise} + * @memberof SurveyService + */ + async findTelemetryCount( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('telemetry', (qb) => { + 
this.findTelemetryCountBaseQuery(qb, isUserAdmin, systemUserId, filterFields); + }) + .select(knex.raw('count(*)::integer as count')) + .from('telemetry'); + + const response = await this.connection.knex(queryBuilder, z.object({ count: z.number() })); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to get telemetry count', [ + 'TelemetryVendorRepository->findTelemetryCount', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0].count; + } +} diff --git a/api/src/repositories/telemetry-repository.ts b/api/src/repositories/telemetry-repository.ts deleted file mode 100644 index 4246e7ba9e..0000000000 --- a/api/src/repositories/telemetry-repository.ts +++ /dev/null @@ -1,156 +0,0 @@ -import SQL from 'sql-template-strings'; -import { z } from 'zod'; -import { getKnex } from '../database/db'; -import { ApiExecuteSQLError } from '../errors/api-error'; -import { getLogger } from '../utils/logger'; -import { BaseRepository } from './base-repository'; - -const defaultLog = getLogger('repositories/telemetry-repository'); - -export const Deployment = z.object({ - /** - * SIMS deployment primary ID - */ - deployment_id: z.number(), - /** - * SIMS critter primary ID - */ - critter_id: z.number(), - /** - * BCTW deployment primary ID - */ - bctw_deployment_id: z.string().uuid() -}); - -export type Deployment = z.infer; - -/** - * Interface reflecting survey telemetry retrieved from the database - */ -export const TelemetrySubmissionRecord = z.object({ - survey_telemetry_submission_id: z.number(), - survey_id: z.number(), - key: z.string(), - original_filename: z.string(), - create_date: z.string(), - create_user: z.number(), - update_date: z.string().nullable(), - update_user: z.number().nullable() -}); - -export type TelemetrySubmissionRecord = z.infer; - -export class TelemetryRepository extends BaseRepository { - async insertSurveyTelemetrySubmission( - submission_id: number, - key: string, - survey_id: number, - 
original_filename: string - ): Promise { - defaultLog.debug({ label: 'insertSurveyTelemetrySubmission' }); - const sqlStatement = SQL` - INSERT INTO - survey_telemetry_submission - (survey_telemetry_submission_id, key, survey_id, original_filename) - VALUES - (${submission_id}, ${key}, ${survey_id}, ${original_filename}) - RETURNING *;`; - - const response = await this.connection.sql(sqlStatement, TelemetrySubmissionRecord); - - return response.rows[0]; - } - - /** - * Retrieves the next submission ID from the survey_telemetry_submission_id_seq sequence - * - * @return {*} {Promise} - * @memberof TelemetryRepository - */ - async getNextSubmissionId(): Promise { - const sqlStatement = SQL` - SELECT nextval('biohub.survey_telemetry_submission_id_seq')::integer as survey_telemetry_submission; - `; - const response = await this.connection.sql<{ survey_telemetry_submission: number }>(sqlStatement); - return response.rows[0].survey_telemetry_submission; - } - - /** - * Retrieves the telemetry submission record by the given submission ID. - * - * @param {number} submissionId - * @return {*} {Promise} - * @memberof TelemetryRepository - */ - async getTelemetrySubmissionById(submissionId: number): Promise { - const queryBuilder = getKnex() - .queryBuilder() - .select('*') - .from('survey_telemetry_submission') - .where('survey_telemetry_submission_id', submissionId); - - const response = await this.connection.knex(queryBuilder, TelemetrySubmissionRecord); - - if (!response.rowCount) { - throw new ApiExecuteSQLError('Failed to get telemetry submission', [ - 'TelemetryRepository->getTelemetrySubmissionById', - 'rowCount was null or undefined, expected rowCount = 1' - ]); - } - - return response.rows[0]; - } - - /** - * Get deployments for the given critter ids. - * - * Note: SIMS does not store deployment information, beyond an ID. Deployment details must be fetched from the - * external BCTW API. 
- * - * @param {number[]} critterIds - * @return {*} {Promise} - * @memberof TelemetryRepository - */ - async getDeploymentsByCritterIds(critterIds: number[]): Promise { - const queryBuilder = getKnex() - .queryBuilder() - .select(['deployment_id', 'critter_id', 'bctw_deployment_id']) - .from('deployment') - .whereIn('critter_id', critterIds); - - const response = await this.connection.knex(queryBuilder, Deployment); - - return response.rows; - } - - /** - * Get deployments for the provided survey id. - * - * Note: SIMS does not store deployment information, beyond an ID. Deployment details must be fetched from the - * external BCTW API. - * - * @param {number} surveyId - * @return {*} {Promise} - * @memberof TelemetryRepository - */ - async getDeploymentsBySurveyId(surveyId: number): Promise { - const sqlStatement = SQL` - SELECT - deployment.deployment_id, - deployment.critter_id, - deployment.bctw_deployment_id - FROM - deployment - LEFT JOIN - critter - ON - critter.critter_id = deployment.critter_id - WHERE - critter.survey_id = ${surveyId}; - `; - - const response = await this.connection.sql(sqlStatement, Deployment); - - return response.rows; - } -} diff --git a/api/src/services/bctw-service/bctw-deployment-service.ts b/api/src/services/bctw-service/bctw-deployment-service.ts deleted file mode 100644 index 1d4a9da582..0000000000 --- a/api/src/services/bctw-service/bctw-deployment-service.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { z } from 'zod'; -import { BctwService } from './bctw-service'; - -export const BctwDeploymentRecordWithDeviceMeta = z.object({ - assignment_id: z.string().uuid(), - collar_id: z.string().uuid(), - critter_id: z.string().uuid(), - created_at: z.string(), - created_by_user_id: z.string().nullable(), - updated_at: z.string().nullable(), - updated_by_user_id: z.string().nullable(), - valid_from: z.string(), - valid_to: z.string().nullable(), - attachment_start: z.string(), - attachment_end: z.string().nullable(), - deployment_id: 
z.string(), - device_id: z.number().nullable(), - device_make: z.number().nullable(), - device_model: z.string().nullable(), - frequency: z.number().nullable(), - frequency_unit: z.number().nullable() -}); -export type BctwDeploymentRecordWithDeviceMeta = z.infer; - -export const BctwDeploymentRecord = z.object({ - assignment_id: z.string(), - collar_id: z.string(), - critter_id: z.string(), - created_at: z.string(), - created_by_user_id: z.string(), - updated_at: z.string().nullable(), - updated_by_user_id: z.string().nullable(), - valid_from: z.string(), - valid_to: z.string().nullable(), - attachment_start: z.string(), - attachment_end: z.string().nullable(), - deployment_id: z.string(), - device_id: z.number() -}); -export type BctwDeploymentRecord = z.infer; - -export const BctwDeploymentUpdate = z.object({ - deployment_id: z.string(), - attachment_start: z.string(), - attachment_end: z.string().nullable() -}); -export type BctwDeploymentUpdate = z.infer; - -export const BctwDeployDevice = z.object({ - deployment_id: z.string().uuid(), - device_id: z.number(), - frequency: z.number().optional(), - frequency_unit: z.string().optional(), - device_make: z.string().optional(), - device_model: z.string().optional(), - attachment_start: z.string(), - attachment_end: z.string().nullable(), - critter_id: z.string() -}); -export type BctwDeployDevice = z.infer; - -export class BctwDeploymentService extends BctwService { - /** - * Create a new deployment for a telemetry device on a critter. - * - * @param {BctwDeployDevice} device - * @return {*} {Promise} - * @memberof BctwDeploymentService - */ - async createDeployment(device: BctwDeployDevice): Promise { - const { data } = await this.axiosInstance.post('/deploy-device', device); - - return data; - } - - /** - * Get deployment records for a list of deployment IDs. 
- * - * @param {string[]} deploymentIds - * @return {*} {Promise} - * @memberof BctwDeploymentService - */ - async getDeploymentsByIds(deploymentIds: string[]): Promise { - const { data } = await this.axiosInstance.post('/get-deployments', deploymentIds); - - return data; - } - - /** - * Get all existing deployments for a list of critter IDs. - * - * @param {string[]} critter_ids - * @return {*} {Promise} - * @memberof BctwDeploymentService - */ - async getDeploymentsByCritterId(critter_ids: string[]): Promise { - const { data } = await this.axiosInstance.get('/get-deployments-by-critter-id', { - params: { critter_ids: critter_ids } - }); - - return data; - } - - /** - * Update the start and end dates of an existing deployment. - * - * @param {BctwDeploymentUpdate} deployment - * @return {*} {Promise} - * @memberof BctwDeploymentService - */ - async updateDeployment(deployment: BctwDeploymentUpdate): Promise[]> { - const { data } = await this.axiosInstance.patch('/update-deployment', deployment); - - return data; - } - - /** - * Soft deletes the deployment in BCTW. - * - * @param {string} deployment_id uuid - * @returns {*} {Promise} - * @memberof BctwDeploymentService - */ - async deleteDeployment(deployment_id: string): Promise { - const { data } = await this.axiosInstance.delete(`/delete-deployment/${deployment_id}`); - - return data; - } -} diff --git a/api/src/services/bctw-service/bctw-device-service.ts b/api/src/services/bctw-service/bctw-device-service.ts deleted file mode 100644 index e53e38088a..0000000000 --- a/api/src/services/bctw-service/bctw-device-service.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { BctwDeployDevice } from './bctw-deployment-service'; -import { BctwService } from './bctw-service'; - -export type BctwDevice = Omit & { - collar_id: string; -}; - -export type BctwUpdateCollarRequest = { - /** - * The primary ID (uuid) of the collar record to update. 
- */ - collar_id: string; - device_make?: number | null; - device_model?: string | null; - frequency?: number | null; - frequency_unit?: number | null; -}; - -export class BctwDeviceService extends BctwService { - /** - * Get a list of all supported collar vendors. - * - * TODO: unused? - * - * @return {*} {Promise} - * @memberof BctwDeviceService - */ - async getCollarVendors(): Promise { - const { data } = await this.axiosInstance.get('/get-collar-vendors'); - - return data; - } - - /** - * Get device hardware details by device id and device make. - * - * TODO: unused? - * - * @param {number} deviceId - * @param {deviceMake} deviceMake - * @returns {*} {Promise} - * @memberof BctwService - */ - async getDeviceDetails(deviceId: number, deviceMake: string): Promise { - const { data } = await this.axiosInstance.get(`/get-collar-history-by-device/${deviceId}`, { - params: { make: deviceMake } - }); - - return data; - } - - /** - * Update device hardware details in BCTW. - * - * @param {BctwDevice} device - * @returns {*} {BctwDevice} - * @memberof BctwService - */ - async updateDevice(device: BctwDevice): Promise { - const { data } = await this.axiosInstance.post('/upsert-collar', device); - - if (data?.errors?.length) { - throw Error(JSON.stringify(data.errors)); - } - - return data; - } - - /** - * Update collar details in BCTW. - * - * @param {BctwUpdateCollarRequest} collar - The collar details to update. 
- * @return {*} {Promise} - * @memberof BctwDeviceService - */ - async updateCollar(collar: BctwUpdateCollarRequest): Promise { - const { data } = await this.axiosInstance.patch('/update-collar', collar); - - if (data?.errors?.length) { - throw Error(JSON.stringify(data.errors)); - } - - return data; - } -} diff --git a/api/src/services/bctw-service/bctw-keyx-service.test.ts b/api/src/services/bctw-service/bctw-keyx-service.test.ts deleted file mode 100644 index 52ad7a64e1..0000000000 --- a/api/src/services/bctw-service/bctw-keyx-service.test.ts +++ /dev/null @@ -1,83 +0,0 @@ -import chai, { expect } from 'chai'; -import FormData from 'form-data'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import { BctwKeyxService } from '../bctw-service/bctw-keyx-service'; - -chai.use(sinonChai); - -describe('BctwKeyxService', () => { - afterEach(() => { - sinon.restore(); - }); - - describe('uploadKeyX', () => { - it('should send a post request', async () => { - const mockUser = { keycloak_guid: 'abc123', username: 'testuser' }; - - const bctwKeyxService = new BctwKeyxService(mockUser); - - const mockAxios = sinon - .stub(bctwKeyxService.axiosInstance, 'post') - .resolves({ data: { results: [], errors: [] } }); - - const mockMulterFile = { buffer: 'buffer', originalname: 'originalname.keyx' } as unknown as Express.Multer.File; - - sinon.stub(FormData.prototype, 'append'); - - const mockGetFormDataHeaders = sinon - .stub(FormData.prototype, 'getHeaders') - .resolves({ 'content-type': 'multipart/form-data' }); - - const result = await bctwKeyxService.uploadKeyX(mockMulterFile); - - expect(mockGetFormDataHeaders).to.have.been.calledOnce; - expect(result).to.eql({ totalKeyxFiles: 0, newRecords: 0, existingRecords: 0 }); - expect(mockAxios).to.have.been.calledOnce; - }); - - it('should throw an error if the file is not a valid keyx file', async () => { - const mockUser = { keycloak_guid: 'abc123', username: 'testuser' }; - - 
const bctwKeyxService = new BctwKeyxService(mockUser); - - sinon.stub(bctwKeyxService.axiosInstance, 'post').rejects(); - - const mockMulterFile = { - buffer: 'buffer', - originalname: 'originalname.notValid' // invalid file extension - } as unknown as Express.Multer.File; - - sinon.stub(FormData.prototype, 'append'); - - sinon.stub(FormData.prototype, 'getHeaders').resolves({ 'content-type': 'multipart/form-data' }); - - await bctwKeyxService - .uploadKeyX(mockMulterFile) - .catch((e) => - expect(e.message).to.equal('File is neither a .keyx file, nor an archive containing only .keyx files') - ); - }); - - it('should throw an error if the response body has errors', async () => { - const mockUser = { keycloak_guid: 'abc123', username: 'testuser' }; - - const bctwKeyxService = new BctwKeyxService(mockUser); - - sinon - .stub(bctwKeyxService.axiosInstance, 'post') - .resolves({ data: { results: [], errors: [{ error: 'error' }] } }); - - const mockMulterFile = { buffer: 'buffer', originalname: 'originalname.keyx' } as unknown as Express.Multer.File; - - sinon.stub(FormData.prototype, 'append'); - - sinon.stub(FormData.prototype, 'getHeaders').resolves({ 'content-type': 'multipart/form-data' }); - - await bctwKeyxService - .uploadKeyX(mockMulterFile) - .catch((e) => expect(e.message).to.equal('API request failed with errors')); - }); - }); -}); diff --git a/api/src/services/bctw-service/bctw-keyx-service.ts b/api/src/services/bctw-service/bctw-keyx-service.ts deleted file mode 100644 index 5ced5e4c17..0000000000 --- a/api/src/services/bctw-service/bctw-keyx-service.ts +++ /dev/null @@ -1,102 +0,0 @@ -import FormData from 'form-data'; -import { z } from 'zod'; -import { ApiError, ApiErrorType } from '../../errors/api-error'; -import { checkFileForKeyx } from '../../utils/media/media-utils'; -import { BctwService } from './bctw-service'; - -export const BctwUploadKeyxResponse = z.object({ - errors: z.array( - z.object({ - row: z.string(), - error: z.string(), - rownum: 
z.number() - }) - ), - results: z.array( - z.object({ - idcollar: z.number(), - comtype: z.string(), - idcom: z.string(), - collarkey: z.string(), - collartype: z.number(), - dtlast_fetch: z.string().nullable() - }) - ) -}); -export type BctwUploadKeyxResponse = z.infer; - -export const BctwKeyXDetails = z.object({ - device_id: z.number(), - keyx: z - .object({ - idcom: z.string(), - comtype: z.string(), - idcollar: z.number(), - collarkey: z.string(), - collartype: z.number() - }) - .nullable() -}); -export type BctwKeyXDetails = z.infer; - -export class BctwKeyxService extends BctwService { - /** - * Upload a single or multiple zipped keyX files to the BCTW API. - * - * @param {Express.Multer.File} keyX - * @return {*} {Promise} - * @memberof BctwKeyxService - */ - async uploadKeyX(keyX: Express.Multer.File) { - const isValidKeyX = checkFileForKeyx(keyX); - - if (isValidKeyX.error) { - throw new ApiError(ApiErrorType.GENERAL, isValidKeyX.error); - } - - const formData = new FormData(); - - formData.append('xml', keyX.buffer, keyX.originalname); - - const config = { - headers: { - ...formData.getHeaders() - } - }; - - const response = await this.axiosInstance.post('/import-xml', formData, config); - - const data: BctwUploadKeyxResponse = response.data; - - if (data.errors.length) { - const actualErrors: string[] = []; - - for (const error of data.errors) { - // Ignore errors that indicate that a keyX already exists - if (!error.error.endsWith('already exists')) { - actualErrors.push(error.error); - } - } - - if (actualErrors.length) { - throw new ApiError(ApiErrorType.UNKNOWN, 'API request failed with errors', actualErrors); - } - } - - return { - totalKeyxFiles: data.results.length + data.errors.length, - newRecords: data.results.length, - existingRecords: data.errors.length - }; - } - - async getKeyXDetails(deviceIds: number[]): Promise { - const { data } = await this.axiosInstance.get('/get-collars-keyx', { - params: { - device_ids: deviceIds.map((id) => 
String(id)) - } - }); - - return data; - } -} diff --git a/api/src/services/bctw-service/bctw-service.ts b/api/src/services/bctw-service/bctw-service.ts deleted file mode 100644 index eb28484b91..0000000000 --- a/api/src/services/bctw-service/bctw-service.ts +++ /dev/null @@ -1,124 +0,0 @@ -import axios, { AxiosError, AxiosInstance, AxiosResponse } from 'axios'; -import { Request } from 'express'; -import { z } from 'zod'; -import { ApiError, ApiErrorType } from '../../errors/api-error'; -import { HTTP500 } from '../../errors/http-error'; -import { ICodeResponse } from '../../models/bctw'; -import { KeycloakService } from '../keycloak-service'; - -export const BctwUser = z.object({ - keycloak_guid: z.string(), - username: z.string() -}); -export type BctwUser = z.infer; - -export const getBctwUser = (req: Request): BctwUser => ({ - keycloak_guid: req.system_user?.user_guid ?? '', - username: req.system_user?.user_identifier ?? '' -}); - -export class BctwService { - user: BctwUser; - keycloak: KeycloakService; - axiosInstance: AxiosInstance; - - constructor(user: BctwUser) { - this.user = user; - this.keycloak = new KeycloakService(); - this.axiosInstance = axios.create({ - headers: { - user: this.getUserHeader() - }, - baseURL: process.env.BCTW_API_HOST || '', - timeout: 10000 - }); - - this.axiosInstance.interceptors.response.use( - (response: AxiosResponse) => { - return response; - }, - (error: AxiosError) => { - if ( - error?.code === 'ECONNREFUSED' || - error?.code === 'ECONNRESET' || - error?.code === 'ETIMEOUT' || - error?.code === 'ECONNABORTED' - ) { - return Promise.reject( - new HTTP500('Connection to the BCTW API server was refused. Please try again later.', [error?.message]) - ); - } - const data: any = error.response?.data; - const errMsg = data?.error ?? data?.errors ?? data ?? 'Unknown error'; - const issues = data?.issues ?? 
[]; - - return Promise.reject( - new ApiError( - ApiErrorType.UNKNOWN, - `API request failed with status code ${error?.response?.status}: ${errMsg}`, - [].concat(errMsg).concat(issues) - ) - ); - } - ); - - // Async request interceptor - this.axiosInstance.interceptors.request.use( - async (config) => { - const token = await this.getToken(); - config.headers['Authorization'] = `Bearer ${token}`; - - return config; - }, - (error) => { - return Promise.reject(error); - } - ); - } - - /** - * Return user information as a JSON string. - * - * @return {*} {string} - * @memberof BctwService - */ - getUserHeader(): string { - return JSON.stringify(this.user); - } - - /** - * Retrieve an authentication token using Keycloak service. - * - * @return {*} {Promise} - * @memberof BctwService - */ - async getToken(): Promise { - const token = await this.keycloak.getKeycloakServiceToken(); - return token; - } - - /** - * Get the health of the platform. - * - * @return {*} {Promise} - * @memberof BctwService - */ - async getHealth(): Promise { - const { data } = await this.axiosInstance.get('/health'); - - return data; - } - - /** - * Get a list of all BCTW codes with a given header name. 
- * - * @param {string} codeHeaderName - * @return {*} {Promise} - * @memberof BctwService - */ - async getCode(codeHeaderName: string): Promise { - const { data } = await this.axiosInstance.get('/get-code', { params: { codeHeader: codeHeaderName } }); - - return data; - } -} diff --git a/api/src/services/bctw-service/bctw-telemetry-service.ts b/api/src/services/bctw-service/bctw-telemetry-service.ts deleted file mode 100644 index c8d1d2deba..0000000000 --- a/api/src/services/bctw-service/bctw-telemetry-service.ts +++ /dev/null @@ -1,139 +0,0 @@ -import { z } from 'zod'; -import { BctwService } from './bctw-service'; - -export const IAllTelemetry = z - .object({ - id: z.string().uuid(), - deployment_id: z.string().uuid(), - latitude: z.number(), - longitude: z.number(), - acquisition_date: z.string(), - telemetry_type: z.string() - }) - .and( - // One of telemetry_id or telemetry_manual_id is expected to be non-null - z.union([ - z.object({ - telemetry_id: z.string().uuid(), - telemetry_manual_id: z.null() - }), - z.object({ - telemetry_id: z.null(), - telemetry_manual_id: z.string().uuid() - }) - ]) - ); -export type IAllTelemetry = z.infer; - -export const IVendorTelemetry = z.object({ - telemetry_id: z.string(), - deployment_id: z.string().uuid(), - collar_transaction_id: z.string().uuid(), - critter_id: z.string().uuid(), - deviceid: z.number(), - latitude: z.number(), - longitude: z.number(), - elevation: z.number(), - vendor: z.string(), - acquisition_date: z.string() -}); -export type IVendorTelemetry = z.infer; - -export const IManualTelemetry = z.object({ - telemetry_manual_id: z.string().uuid(), - deployment_id: z.string().uuid(), - latitude: z.number(), - longitude: z.number(), - acquisition_date: z.string() -}); -export type IManualTelemetry = z.infer; - -export interface ICreateManualTelemetry { - deployment_id: string; - latitude: number; - longitude: number; - acquisition_date: string; -} - -export class BctwTelemetryService extends BctwService { - 
/** - * Get all manual telemetry records - * This set of telemetry is mostly useful for testing purposes. - * - * @returns {*} IManualTelemetry[] - **/ - async getManualTelemetry(): Promise { - const res = await this.axiosInstance.get('/manual-telemetry'); - return res.data; - } - - /** - * retrieves manual telemetry from list of deployment ids - * - * @async - * @param {string[]} deployment_ids - bctw deployments - * @returns {*} IManualTelemetry[] - */ - async getManualTelemetryByDeploymentIds(deployment_ids: string[]): Promise { - const res = await this.axiosInstance.post('/manual-telemetry/deployments', deployment_ids); - return res.data; - } - - /** - * retrieves manual telemetry from list of deployment ids - * - * @async - * @param {string[]} deployment_ids - bctw deployments - * @returns {*} IVendorTelemetry[] - */ - async getVendorTelemetryByDeploymentIds(deployment_ids: string[]): Promise { - const res = await this.axiosInstance.post('/vendor-telemetry/deployments', deployment_ids); - return res.data; - } - - /** - * retrieves manual and vendor telemetry from list of deployment ids - * - * @async - * @param {string[]} deploymentIds - bctw deployments - * @returns {*} IAllTelemetry[] - */ - async getAllTelemetryByDeploymentIds(deploymentIds: string[]): Promise { - const res = await this.axiosInstance.post('/all-telemetry/deployments', deploymentIds); - return res.data; - } - - /** - * Delete manual telemetry records by telemetry_manual_id - * Note: This is a post request that accepts an array of ids - * @param {string[]} telemetry_manual_ids - * - * @returns {*} IManualTelemetry[] - **/ - async deleteManualTelemetry(telemetry_manual_ids: string[]): Promise { - const res = await this.axiosInstance.post('/manual-telemetry/delete', telemetry_manual_ids); - return res.data; - } - - /** - * Bulk create manual telemetry records - * @param {ICreateManualTelemetry[]} payload - * - * @returns {*} IManualTelemetry[] - **/ - async createManualTelemetry(payload: 
ICreateManualTelemetry[]): Promise { - const res = await this.axiosInstance.post('/manual-telemetry', payload); - return res.data; - } - - /** - * Bulk update manual telemetry records - * @param {IManualTelemetry} payload - * - * @returns {*} IManualTelemetry[] - **/ - async updateManualTelemetry(payload: IManualTelemetry[]): Promise { - const res = await this.axiosInstance.patch('/manual-telemetry', payload); - return res.data; - } -} diff --git a/api/src/services/code-service.test.ts b/api/src/services/code-service.test.ts index d00351f3cc..4c15920d67 100644 --- a/api/src/services/code-service.test.ts +++ b/api/src/services/code-service.test.ts @@ -46,6 +46,8 @@ describe('CodeService', () => { 'survey_progress', 'method_response_metrics', 'observation_subcount_signs', + 'telemetry_device_makes', + 'frequency_units', 'alert_types', 'vantages' ); diff --git a/api/src/services/code-service.ts b/api/src/services/code-service.ts index f8c828d741..e52f7bcd5c 100644 --- a/api/src/services/code-service.ts +++ b/api/src/services/code-service.ts @@ -45,6 +45,8 @@ export class CodeService extends DBService { method_response_metrics, attractants, observation_subcount_signs, + telemetry_device_makes, + frequency_units, alert_types, vantages ] = await Promise.all([ @@ -68,6 +70,8 @@ export class CodeService extends DBService { await this.codeRepository.getMethodResponseMetrics(), await this.codeRepository.getAttractants(), await this.codeRepository.getObservationSubcountSigns(), + await this.codeRepository.getActiveTelemetryDeviceMakes(), + await this.codeRepository.getFrequencyUnits(), await this.codeRepository.getAlertTypes(), await this.codeRepository.getVantages() ]); @@ -93,6 +97,8 @@ export class CodeService extends DBService { method_response_metrics, attractants, observation_subcount_signs, + telemetry_device_makes, + frequency_units, alert_types, vantages }; diff --git a/api/src/services/deployment-service.ts b/api/src/services/deployment-service.ts deleted file mode 
100644 index c0cc0faf23..0000000000 --- a/api/src/services/deployment-service.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { IDBConnection } from '../database/db'; -import { ICreateSurveyDeployment, IUpdateSurveyDeployment, SurveyDeployment } from '../models/survey-deployment'; -import { DeploymentRepository } from '../repositories/deployment-repository'; -import { DBService } from './db-service'; - -/** - * Service layer for survey critters. - * - * @export - * @class DeploymentService - * @extends {DBService} - */ -export class DeploymentService extends DBService { - deploymentRepository: DeploymentRepository; - - constructor(connection: IDBConnection) { - super(connection); - - this.deploymentRepository = new DeploymentRepository(connection); - } - - /** - * Get deployments for a Survey - * - * @param {number} surveyId - * @return {*} {Promise} - * @memberof DeploymentService - */ - async getDeploymentsForSurveyId(surveyId: number): Promise { - return this.deploymentRepository.getDeploymentsForSurveyId(surveyId); - } - - /** - * Get a specific deployment by its integer ID - * - * @param {number} deploymentId - * @return {*} {Promise} - * @memberof DeploymentService - */ - async getDeploymentById(deploymentId: number): Promise { - return this.deploymentRepository.getDeploymentById(deploymentId); - } - - /** - * Get a specific deployment by its integer ID - * - * @param {number} surveyId - * @param {number} critterId - * @return {*} {Promise} - * @memberof DeploymentService - */ - async getDeploymentForCritterId(surveyId: number, critterId: number): Promise { - return this.deploymentRepository.getDeploymentForCritterId(surveyId, critterId); - } - - /** - * Create a new deployment - * - * @param {ICreateSurveyDeployment} deployment - * @return {*} {Promise} - * @memberof DeploymentService - */ - async insertDeployment(deployment: ICreateSurveyDeployment): Promise { - return this.deploymentRepository.insertDeployment(deployment); - } - - /** - * Update a deployment in 
SIMS - * - * @param {IUpdateSurveyDeployment} deployment - * @return {*} {Promise} - * @memberof DeploymentService - */ - async updateDeployment(deployment: IUpdateSurveyDeployment): Promise { - return this.deploymentRepository.updateDeployment(deployment); - } - - /** - * Deletes the deployment in SIMS. - * - * @param {number} surveyId - * @param {number} deploymentId - * @return {*} {Promise<{ bctw_deployment_id: string }>} - * @memberof DeploymentService - */ - async deleteDeployment(surveyId: number, deploymentId: number): Promise<{ bctw_deployment_id: string }> { - return this.deploymentRepository.deleteDeployment(surveyId, deploymentId); - } -} diff --git a/api/src/services/export-services/telemetry/export-telemetry-strategy.ts b/api/src/services/export-services/telemetry/export-telemetry-strategy.ts index 64dd0ee938..069c483c4c 100644 --- a/api/src/services/export-services/telemetry/export-telemetry-strategy.ts +++ b/api/src/services/export-services/telemetry/export-telemetry-strategy.ts @@ -2,7 +2,7 @@ import { Readable } from 'stream'; import { IDBConnection } from '../../../database/db'; import { getLogger } from '../../../utils/logger'; import { DBService } from '../../db-service'; -import { TelemetryService } from '../../telemetry-service'; +import { TelemetryVendorService } from '../../telemetry-services/telemetry-vendor-service'; import { ExportDataStreamOptions, ExportStrategy, ExportStrategyConfig } from '../export-strategy'; const defaultLog = getLogger('services/export-telemetry-strategy'); @@ -63,7 +63,7 @@ export class ExportTelemetryStrategy extends DBService implements ExportStrategy * @memberof ExportTelemetryStrategy */ _getStream = (_options: ExportDataStreamOptions): Readable => { - const telemetryService = new TelemetryService(this.connection); + const telemetryVendorService = new TelemetryVendorService(this.connection); const isUserAdmin = this.config.isUserAdmin; const systemUserId = this.connection.systemUserId(); @@ -74,7 +74,7 @@ 
export class ExportTelemetryStrategy extends DBService implements ExportStrategy const stream = new Readable({ objectMode: true, read() { - telemetryService + telemetryVendorService .findTelemetry(isUserAdmin, systemUserId, filterFields) .then((telemetry) => { for (const item of telemetry) { diff --git a/api/src/services/import-services/telemetry/import-telemetry-strategy.test.ts b/api/src/services/import-services/telemetry/import-telemetry-strategy.test.ts new file mode 100644 index 0000000000..eb1b6cc078 --- /dev/null +++ b/api/src/services/import-services/telemetry/import-telemetry-strategy.test.ts @@ -0,0 +1,64 @@ +import { expect } from 'chai'; +import sinon from 'sinon'; +import { MediaFile } from '../../../utils/media/media-file'; +import * as worksheetUtils from '../../../utils/xlsx-utils/worksheet-utils'; +import { getMockDBConnection } from '../../../__mocks__/db'; +import { importCSV } from '../import-csv'; +import { ImportTelemetryStrategy } from './import-telemetry-strategy'; + +describe('import-telemetry-strategy', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('importCSV telemetry worksheet', () => { + it('should validate successfully', async () => { + const worksheet = { + A1: { t: 's', v: 'VENDOR' }, + B1: { t: 's', v: 'SERIAL' }, + C1: { t: 's', v: 'LATITUDE' }, + D1: { t: 's', v: 'LONGITUDE' }, + E1: { t: 's', v: 'DATE' }, + F1: { t: 's', v: 'TIME' }, + + A2: { t: 's', v: 'lotek' }, + B2: { t: 'n', w: '1234', v: 1234 }, + C2: { t: 'n', w: '2', v: 2 }, + D2: { t: 'n', w: '2', v: 2 }, + E2: { z: 'm/d/yy', t: 'd', v: '2024-10-31', w: '10/31/24' }, + F2: { t: 's', v: '10:10:10' }, + + A3: { t: 's', v: 'lotek' }, + B3: { t: 'n', w: '1234', v: 1234 }, + C3: { t: 'n', w: '30', v: 30 }, + D3: { t: 'n', w: '30', v: 30 }, + E3: { z: 'm/d/yy', t: 'd', v: '2024-10-31', w: '10/31/24' }, + + '!ref': 'A1:F3' + }; + + const mockDBConnection = getMockDBConnection(); + + const importTelemetryStrategy = new 
ImportTelemetryStrategy(mockDBConnection, 1); + + sinon.stub(worksheetUtils, 'getDefaultWorksheet').returns(worksheet); + + sinon.stub(importTelemetryStrategy.telemetryVendorService.deploymentService, 'getDeploymentsForSurvey').resolves([ + { + deployment_id: 1, + device_key: 'lotek:1234', + attachment_start_timestamp: '2024-10-21 10:10:10', + attachment_end_timestamp: null + } + ] as any); + + sinon.stub(importTelemetryStrategy.telemetryVendorService, 'bulkCreateManualTelemetry').resolves(); + + try { + await importCSV(new MediaFile('test', 'test', 'test' as unknown as Buffer), importTelemetryStrategy); + } catch (err: any) { + expect.fail(); + } + }); + }); +}); diff --git a/api/src/services/import-services/telemetry/import-telemetry-strategy.ts b/api/src/services/import-services/telemetry/import-telemetry-strategy.ts new file mode 100644 index 0000000000..ae27c44ccd --- /dev/null +++ b/api/src/services/import-services/telemetry/import-telemetry-strategy.ts @@ -0,0 +1,127 @@ +import { z } from 'zod'; +import { TelemetryManualRecord } from '../../../database-models/telemetry_manual'; +import { IDBConnection } from '../../../database/db'; +import { CSV_COLUMN_ALIASES } from '../../../utils/xlsx-utils/column-aliases'; +import { generateColumnCellGetterFromColumnValidator } from '../../../utils/xlsx-utils/column-validator-utils'; +import { IXLSXCSVValidator } from '../../../utils/xlsx-utils/worksheet-utils'; +import { DBService } from '../../db-service'; +import { getTelemetryDeviceKey } from '../../telemetry-services/telemetry-utils'; +import { TelemetryVendorService } from '../../telemetry-services/telemetry-vendor-service'; +import { CSVImportStrategy, Row } from '../import-csv.interface'; +import { formatTimestampString } from '../utils/datetime'; + +/** + * ImportTelemetryStrategy + * + * @export + * + * @class + * @extends {DBService} + * @implements {CSVImportStrategy} + */ +export class ImportTelemetryStrategy extends DBService implements CSVImportStrategy { 
+ surveyId: number; + + telemetryVendorService: TelemetryVendorService; + + /** + * An XLSX validation config for the standard columns of a SIMS Telemetry CSV. + * + * Note: `satisfies` allows `keyof` to correctly infer keyof type, while also + * enforcing uppercase object keys. + */ + columnValidator = { + SERIAL: { type: 'stringOrNumber', aliases: ['DEVICE_ID'] }, // 1234 + VENDOR: { type: 'string' }, // lotek + LATITUDE: { type: 'number', aliases: CSV_COLUMN_ALIASES.LATITUDE }, + LONGITUDE: { type: 'number', aliases: CSV_COLUMN_ALIASES.LONGITUDE }, + DATE: { type: 'date' }, + TIME: { type: 'string', optional: true } + } satisfies IXLSXCSVValidator; + + /** + * Construct an instance of ImportTelemetryStrategy. + * + * @param {IDBConnection} connection - DB connection + * @param {number} surveyId + */ + constructor(connection: IDBConnection, surveyId: number) { + super(connection); + + this.surveyId = surveyId; + + this.telemetryVendorService = new TelemetryVendorService(connection); + } + + /** + * Validate the CSV rows against zod schema. 
+ * + * @param {Row[]} rows - CSV rows + * @returns {*} + */ + async validateRows(rows: Row[]) { + const getColumnCell = generateColumnCellGetterFromColumnValidator(this.columnValidator); + const deployments = await this.telemetryVendorService.deploymentService.getDeploymentsForSurvey(this.surveyId); + + const rowsToValidate: Partial[] = []; + + for (const row of rows) { + // Raw column cell values + const vendor = getColumnCell(row, 'VENDOR').cell.toLowerCase(); + const serial = getColumnCell(row, 'SERIAL').cell; + const latitude = getColumnCell(row, 'LATITUDE').cell; + const longitude = getColumnCell(row, 'LONGITUDE').cell; + const date = getColumnCell(row, 'DATE').cell; + const time = getColumnCell(row, 'TIME').cell; + + // Format additional values + const timestamp = formatTimestampString(date, time); + const deviceKey = getTelemetryDeviceKey({ vendor, serial }); + + // Find the deployment that matches the device key and is within the telemetry date range + // This is making the assumption that only one match can be found (database date/deviceKey constraints) + const deployment = deployments.find((deployment) => { + const telemetryWithinDeployment = + timestamp >= deployment.attachment_start_timestamp && + (deployment.attachment_end_timestamp === null || timestamp <= deployment.attachment_end_timestamp); + + return deployment.device_key === deviceKey && telemetryWithinDeployment; + }); + + // Push the row to validate into the array + rowsToValidate.push({ + deployment_id: deployment?.deployment_id, + latitude: latitude, + longitude: longitude, + acquisition_date: timestamp, + transmission_date: null + }); + } + + // Validate the rows against the zod schema + return z + .array( + z.object({ + deployment_id: z.number({ + required_error: `Unable to infer matching deployment with vendor and serial. 
Make sure telemetry date and time intersect with deployment attachment start and end dates.` + }), + latitude: z.number(), + longitude: z.number(), + acquisition_date: z.string(), + transmission_date: z.string().nullable() + }) + ) + .safeParse(rowsToValidate); + } + + /** + * Insert manual telemetry into SIMS. + * + * @async + * @param {TelemetryManualRecord[]} telemetry - Parsed CSV telemetry + * @returns {Promise} + */ + async insert(telemetry: TelemetryManualRecord[]): Promise { + return this.telemetryVendorService.bulkCreateManualTelemetry(this.surveyId, telemetry); + } +} diff --git a/api/src/services/import-services/utils/datetime.ts b/api/src/services/import-services/utils/datetime.ts index 8ab52447e4..d3787fa12c 100644 --- a/api/src/services/import-services/utils/datetime.ts +++ b/api/src/services/import-services/utils/datetime.ts @@ -33,6 +33,23 @@ export const formatTimeString = (time?: string | null): string | undefined => { } }; +/** + * Format date and time into timestamp string. + * + * @param {string} date - Date string + * @param {string} [time] - Time string + * @returns {string} Formatted date and time string + */ +export const formatTimestampString = (date: string, time?: string): string => { + const formattedTime = formatTimeString(time); + + if (formattedTime) { + return dayjs(`${date} ${formattedTime}`).format('YYYY-MM-DD HH:mm:ss'); + } + + return dayjs(date).format('YYYY-MM-DD HH:mm:ss'); +}; + /** * Checks if two date strings are equal. 
* diff --git a/api/src/services/telemetry-service.ts b/api/src/services/telemetry-service.ts deleted file mode 100644 index 86d39e521b..0000000000 --- a/api/src/services/telemetry-service.ts +++ /dev/null @@ -1,359 +0,0 @@ -import dayjs from 'dayjs'; -import { DefaultDateFormat, DefaultTimeFormat } from '../constants/dates'; -import { IDBConnection } from '../database/db'; -import { ApiGeneralError } from '../errors/api-error'; -import { IAllTelemetryAdvancedFilters } from '../models/telemetry-view'; -import { SurveyCritterRecord } from '../repositories/survey-critter-repository'; -import { Deployment, TelemetryRepository, TelemetrySubmissionRecord } from '../repositories/telemetry-repository'; -import { generateS3FileKey, getFileFromS3 } from '../utils/file-utils'; -import { parseS3File } from '../utils/media/media-utils'; -import { CSV_COLUMN_ALIASES } from '../utils/xlsx-utils/column-aliases'; -import { - constructXLSXWorkbook, - getDefaultWorksheet, - getWorksheetRowObjects, - IXLSXCSVValidator, - validateCsvFile -} from '../utils/xlsx-utils/worksheet-utils'; -import { ApiPaginationOptions } from '../zod-schema/pagination'; -import { AttachmentService } from './attachment-service'; -import { BctwDeploymentRecord, BctwDeploymentService } from './bctw-service/bctw-deployment-service'; -import { BctwTelemetryService, IAllTelemetry, ICreateManualTelemetry } from './bctw-service/bctw-telemetry-service'; -import { ICritter, ICritterbaseUser } from './critterbase-service'; -import { DBService } from './db-service'; -import { DeploymentService } from './deployment-service'; -import { SurveyCritterService } from './survey-critter-service'; - -export type FindTelemetryResponse = { telemetry_id: string } & Pick< - IAllTelemetry, - 'acquisition_date' | 'latitude' | 'longitude' | 'telemetry_type' -> & - Pick & - Pick & - Pick & - Pick; - -const telemetryCSVColumnValidator: IXLSXCSVValidator = { - DEVICE_ID: { type: 'number' }, - DATE: { type: 'date' }, - TIME: { type: 
'string' }, - LATITUDE: { type: 'number', aliases: CSV_COLUMN_ALIASES.LATITUDE }, - LONGITUDE: { type: 'number', aliases: CSV_COLUMN_ALIASES.LONGITUDE } -}; - -export class TelemetryService extends DBService { - telemetryRepository: TelemetryRepository; - - attachmentService: AttachmentService; - - constructor(connection: IDBConnection) { - super(connection); - - this.telemetryRepository = new TelemetryRepository(connection); - - this.attachmentService = new AttachmentService(connection); - } - - /** - * - * Inserts a survey telemetry submission record into the database and returns the key - * - * @param {Express.Multer.File} file - * @param {number} projectId - * @param {number} surveyId - * @return {*} {Promise<{ key: string }>} - * @memberof ObservationService - */ - async insertSurveyTelemetrySubmission( - file: Express.Multer.File, - projectId: number, - surveyId: number - ): Promise<{ submission_id: number; key: string }> { - const submissionId = await this.telemetryRepository.getNextSubmissionId(); - const key = generateS3FileKey({ projectId, surveyId, submissionId, fileName: file.originalname }); - const result = await this.telemetryRepository.insertSurveyTelemetrySubmission( - submissionId, - key, - surveyId, - file.originalname - ); - return { submission_id: result.survey_telemetry_submission_id, key }; - } - - async processTelemetryCsvSubmission(submissionId: number, user: ICritterbaseUser): Promise { - // step 1 get submission record - const submission = await this.getTelemetrySubmissionById(submissionId); - - // step 2 get s3 record for given key - const s3Object = await getFileFromS3(submission.key); - - // step 3 parse the file - const mediaFile = await parseS3File(s3Object); - - // step 4 validate csv - if (mediaFile.mimetype !== 'text/csv') { - throw new ApiGeneralError( - `Failed to process file for importing telemetry. Incorrect file type. 
Expected CSV received ${mediaFile.mimetype}` - ); - } - - // step 5 construct workbook/ setup - const xlsxWorkBook = constructXLSXWorkbook(mediaFile); - // Get the default XLSX worksheet - const xlsxWorksheet = getDefaultWorksheet(xlsxWorkBook); - - // step 6 validate columns - if (!validateCsvFile(xlsxWorksheet, telemetryCSVColumnValidator)) { - throw new ApiGeneralError('Failed to process file for importing telemetry. Invalid CSV file.'); - } - - const worksheetRowObjects = getWorksheetRowObjects(xlsxWorksheet); - - // step 7 fetch survey deployments - const deploymentService = new DeploymentService(this.connection); - const bctwDeploymentService = new BctwDeploymentService(user); - - const surveyDeployments = await deploymentService.getDeploymentsForSurveyId(submission.survey_id); - const deployments = await bctwDeploymentService.getDeploymentsByIds( - surveyDeployments.map((deployment) => deployment.bctw_deployment_id) - ); - - // step 8 parse file data and find deployment ids based on device id and attachment dates - const itemsToAdd: ICreateManualTelemetry[] = []; - worksheetRowObjects.forEach((row) => { - const deviceId = Number(row['DEVICE_ID']); - const start = row['DATE']; - const time = row['TIME']; - const dateTime = dayjs(`${start} ${time}`); - - const foundDeployment = deployments.find((item) => { - const currentStart = dayjs(item.attachment_start); - const currentEnd = dayjs(item.attachment_end); - // check the device ids match - if (item.device_id === deviceId) { - // check the date is same or after the device deployment start date - if (dateTime.isAfter(currentStart) || dateTime.isSame(currentStart)) { - if (item.attachment_end) { - // check if the date is same or before the device was removed - if (dateTime.isBefore(currentEnd) || dateTime.isSame(currentEnd)) { - return true; - } - } else { - // no attachment end date means the device is still active and is a match - return true; - } - } - } - return false; - }); - - if (foundDeployment) { - 
itemsToAdd.push({ - deployment_id: foundDeployment.deployment_id, - acquisition_date: dateTime.format(`${DefaultDateFormat} ${DefaultTimeFormat}`), - latitude: row['LATITUDE'], - longitude: row['LONGITUDE'] - }); - } else { - throw new ApiGeneralError( - `No deployment was found for device: ${deviceId} on: ${dateTime.format( - `${DefaultDateFormat} ${DefaultTimeFormat}` - )}` - ); - } - }); - - // step 9 create telemetries - - const bctwTelemetryService = new BctwTelemetryService(user); - - if (itemsToAdd.length > 0) { - try { - return await bctwTelemetryService.createManualTelemetry(itemsToAdd); - } catch (error) { - throw new ApiGeneralError('Error adding Manual Telemetry'); - } - } - - return []; - } - - async getTelemetrySubmissionById(submissionId: number): Promise { - return this.telemetryRepository.getTelemetrySubmissionById(submissionId); - } - - /** - * Get deployments for the given critter ids. - * - * Note: SIMS does not store deployment information, beyond an ID. Deployment details must be fetched from the - * external BCTW API. - * - * @param {number[]} critterIds - * @return {*} {Promise} - * @memberof TelemetryService - */ - async getDeploymentsByCritterIds(critterIds: number[]): Promise { - return this.telemetryRepository.getDeploymentsByCritterIds(critterIds); - } - - /** - * Get deployments for the provided survey id. - * - * Note: SIMS does not store deployment information, beyond an ID. Deployment details must be fetched from the - * external BCTW API. - * - * @param {number} surveyId - * @return {*} {Promise} - * @memberof TelemetryService - */ - async getDeploymentsBySurveyId(surveyId: number): Promise { - return this.telemetryRepository.getDeploymentsBySurveyId(surveyId); - } - - /** - * Retrieves the paginated list of all telemetry records that are available to the user, based on their permissions - * and provided filter criteria. 
- * - * @param {boolean} isUserAdmin - * @param {(number | null)} systemUserId The system user id of the user making the request - * @param {IAllTelemetryAdvancedFilters} [filterFields] - * @param {ApiPaginationOptions} [pagination] - * @return {*} {Promise} - * @memberof TelemetryService - */ - async findTelemetry( - isUserAdmin: boolean, - systemUserId: number | null, - filterFields?: IAllTelemetryAdvancedFilters, - pagination?: ApiPaginationOptions - ): Promise { - // --- Step 1 ----------------------------- - - const surveyCritterService = new SurveyCritterService(this.connection); - // The SIMS critter records the user has access to - const simsCritters = await surveyCritterService.findCritters( - isUserAdmin, - systemUserId, - filterFields, - // Remove the sort and order from the pagination object as these are based on the telemetry sort columns and - // may not be valid for the critter columns - // TODO: Is there a better way to achieve this pagination safety? - pagination - ? { - ...pagination, - sort: undefined, - order: undefined - } - : undefined - ); - - if (!simsCritters.length) { - // Exit early if there are no SIMS critters, and therefore no telemetry - return []; - } - - // --- Step 2 ------------------------------ - - const simsCritterIds = simsCritters.map((critter) => critter.critter_id); - // The sims deployment records the user has access to - const simsDeployments = await this.telemetryRepository.getDeploymentsByCritterIds(simsCritterIds); - - if (!simsDeployments.length) { - // Exit early if there are no SIMS deployments, and therefore no telemetry - return []; - } - - // --- Step 3 ------------------------------ - - const critterbaseCritterIds = simsCritters - .filter((simsCritter) => - simsDeployments.some((surveyDeployment) => surveyDeployment.critter_id === simsCritter.critter_id) - ) - .map((critter) => critter.critterbase_critter_id); - - if (!critterbaseCritterIds.length) { - // Exit early if there are no critterbase critters, and 
therefore no telemetry - return []; - } - - const user = { - keycloak_guid: this.connection.systemUserGUID(), - username: this.connection.systemUserIdentifier() - }; - - const bctwDeploymentService = new BctwDeploymentService(user); - const bctwTelemetryService = new BctwTelemetryService(user); - - // The detailed deployment records from BCTW - // Note: This may include records the user does not have acces to (A critter may have multiple deployments over its - // lifespan, but the user may only have access to a subset of them). - const allBctwDeploymentsForCritters = await bctwDeploymentService.getDeploymentsByCritterId(critterbaseCritterIds); - - // Remove records the user does not have access to - const usersBctwDeployments = allBctwDeploymentsForCritters.filter((deployment) => - simsDeployments.some((item) => item.bctw_deployment_id === deployment.deployment_id) - ); - const usersBctwDeploymentIds = usersBctwDeployments.map((deployment) => deployment.deployment_id); - - if (!usersBctwDeploymentIds.length) { - // Exit early if there are no BCTW deployments the user has access to, and therefore no telemetry - return []; - } - - // --- Step 4 ------------------------------ - - // The telemetry records for the deployments the user has access to - const allTelemetryRecords = await bctwTelemetryService.getAllTelemetryByDeploymentIds(usersBctwDeploymentIds); - - // --- Step 5 ------------------------------ - - // Parse/combine the telemetry, deployment, and critter records into the final response - const response: FindTelemetryResponse[] = []; - for (const telemetryRecord of allTelemetryRecords) { - const usersBctwDeployment = usersBctwDeployments.find( - (usersBctwDeployment) => usersBctwDeployment.deployment_id === telemetryRecord.deployment_id - ); - - if (!usersBctwDeployment) { - continue; - } - - const simsDeployment = simsDeployments.find( - (simsDeployment) => simsDeployment.bctw_deployment_id === telemetryRecord.deployment_id - ); - - if (!simsDeployment) { - 
continue; - } - - const simsCritter = simsCritters.find( - (simsCritter) => simsCritter.critterbase_critter_id === usersBctwDeployment?.critter_id - ); - - if (!simsCritter) { - continue; - } - - response.push({ - // IAllTelemetry - telemetry_id: telemetryRecord.telemetry_id ?? telemetryRecord.telemetry_manual_id, - acquisition_date: telemetryRecord.acquisition_date, - latitude: telemetryRecord.latitude, - longitude: telemetryRecord.longitude, - telemetry_type: telemetryRecord.telemetry_type, - // BctwDeploymentRecord - device_id: usersBctwDeployment.device_id, - // Deployment - bctw_deployment_id: telemetryRecord.deployment_id, - critter_id: simsDeployment.critter_id, - deployment_id: simsDeployment.deployment_id, - // SurveyCritterRecord - critterbase_critter_id: usersBctwDeployment.critter_id, - // ICritter - animal_id: simsCritter.animal_id - }); - } - - return response; - } -} diff --git a/api/src/services/telemetry-services/telemetry-deployment-service.ts b/api/src/services/telemetry-services/telemetry-deployment-service.ts new file mode 100644 index 0000000000..8a1fd08700 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-deployment-service.ts @@ -0,0 +1,155 @@ +import { DeploymentRecord } from '../../database-models/deployment'; +import { IDBConnection } from '../../database/db'; +import { ApiGeneralError } from '../../errors/api-error'; +import { IDeploymentAdvancedFilters } from '../../models/deployment-view'; +import { TelemetryDeploymentRepository } from '../../repositories/telemetry-repositories/telemetry-deployment-repository'; +import { + CreateDeployment, + ExtendedDeploymentRecord, + UpdateDeployment +} from '../../repositories/telemetry-repositories/telemetry-deployment-repository.interface'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { DBService } from '../db-service'; + +/** + * Service class for working with deployments. 
+ * + * @export + * @class TelemetryDeploymentService + * @extends {DBService} + */ +export class TelemetryDeploymentService extends DBService { + telemetryDeploymentRepository: TelemetryDeploymentRepository; + + constructor(connection: IDBConnection) { + super(connection); + + this.telemetryDeploymentRepository = new TelemetryDeploymentRepository(connection); + } + + /** + * Create a new deployment. + * + * @param {CreateDeployment} deployment The deployment data to create + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async createDeployment(deployment: CreateDeployment): Promise { + return this.telemetryDeploymentRepository.createDeployment(deployment); + } + + /** + * Get a specific deployment by its integer ID. + * + * @param {number} surveyId The survey ID + * @param {number} deploymentId The deployment ID + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async getDeploymentById(surveyId: number, deploymentId: number): Promise { + const deployments = await this.telemetryDeploymentRepository.getDeploymentsForSurvey(surveyId, [deploymentId]); + + if (deployments.length !== 1) { + throw new ApiGeneralError(`Failed to get deployment`, ['TelemetryDeploymentService->getDeploymentById']); + } + + return deployments[0]; + } + + /** + * Retrieves the paginated list of deployments under a survey, based on the provided filter params. + * + * @param {number} surveyId + * @param {number[]} [deploymentIds] + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryDeploymentRepository + */ + async getDeploymentsForSurvey( + surveyId: number, + deploymentIds?: number[], + pagination?: ApiPaginationOptions + ): Promise { + return this.telemetryDeploymentRepository.getDeploymentsForSurvey(surveyId, deploymentIds, pagination); + } + + /** + * Retrieves the paginated list of all deployments that are available to the user, based on their permissions and + * provided filter criteria. 
+ * + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IDeploymentAdvancedFilters} filterFields + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async findDeployments( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IDeploymentAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + return this.telemetryDeploymentRepository.findDeployments(isUserAdmin, systemUserId, filterFields, pagination); + } + + /** + * Get deployments for a Critter. + * + * @param {number} surveyId The survey ID + * @param {number} critterId The critter ID + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async getDeploymentsForCritterId(surveyId: number, critterId: number): Promise { + return this.telemetryDeploymentRepository.getDeploymentsForCritterId(surveyId, critterId); + } + + /** + * Get the total count of all deployments for a survey. + * + * @param {number} surveyId + * @return {*} {Promise} + * @memberof telemetryDeploymentRepository + */ + async getDeploymentsCount(surveyId: number): Promise { + return this.telemetryDeploymentRepository.getDeploymentsCount(surveyId); + } + + /** + * Update a deployment. + * + * @param {number} surveyId The survey ID + * @param {number} deployment_id The deployment ID + * @param {UpdateDeployment} deployment The deployment data to update + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async updateDeployment(surveyId: number, deployment_id: number, deployment: UpdateDeployment): Promise { + return this.telemetryDeploymentRepository.updateDeployment(surveyId, deployment_id, deployment); + } + + /** + * Delete a deployment. 
+ * + * @param {number} surveyId The survey ID + * @param {number} deploymentId The deployment ID + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async deleteDeployment(surveyId: number, deploymentId: number): Promise { + return this.telemetryDeploymentRepository.deleteDeployment(surveyId, deploymentId); + } + + /** + * Deletes deployments. + * + * @param {number} surveyId The survey ID + * @param {number[]} deploymentIds The deployment IDs + * @return {*} {Promise} + * @memberof TelemetryDeploymentService + */ + async deleteDeployments(surveyId: number, deploymentIds: number[]): Promise { + return this.telemetryDeploymentRepository.deleteDeployments(surveyId, deploymentIds); + } +} diff --git a/api/src/services/telemetry-services/telemetry-device-service.test.ts b/api/src/services/telemetry-services/telemetry-device-service.test.ts new file mode 100644 index 0000000000..9b831dff21 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-device-service.test.ts @@ -0,0 +1,162 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { TelemetryDeviceRepository } from '../../repositories/telemetry-repositories/telemetry-device-repository'; +import { getMockDBConnection } from '../../__mocks__/db'; +import { TelemetryDeviceService } from './telemetry-device-service'; + +chai.use(sinonChai); + +describe('TelemetryDeviceService', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('getDevice', () => { + it('should return a device by its ID', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'getDevicesByIds').resolves([true] as any); + + const device = await service.getDevice(1, 2); + + expect(repoStub).to.have.been.calledOnceWithExactly(1, [2]); + expect(device).to.be.true; + }); + + it('should throw an error if unable to 
get device', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'getDevicesByIds').resolves([]); + + try { + await service.getDevice(1, 2); + expect.fail(); + } catch (err: any) { + expect(err.message).to.equal('Device not found'); + } + + expect(repoStub).to.have.been.calledOnceWithExactly(1, [2]); + }); + }); + + describe('findDeviceBySerial', () => { + it('should return a device with the given serial and make if it exists in the survey', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const mockDevice = { + device_make_id: 1, + model: null, + survey_id: 1, + device_id: 1, + device_key: '1:lotek', + serial: 'serial', + comment: 'comment' + }; + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'findDeviceBySerial').resolves(mockDevice); + + const device = await service.findDeviceBySerial(1, 2, 1); + + expect(repoStub).to.have.been.calledOnceWithExactly(1, 2, 1); + expect(device).to.eql(mockDevice); + }); + + it('should return null if a device with the given serial and make does not exist in the survey', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'findDeviceBySerial').resolves(null); + + const device = await service.findDeviceBySerial(1, 2, 1); + + expect(repoStub).to.have.been.calledOnceWithExactly(1, 2, 1); + expect(device).to.eql(null); + }); + }); + + describe('deleteDevice', () => { + it('should delete a device by its ID', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon + .stub(TelemetryDeviceRepository.prototype, 'deleteDevicesByIds') + .resolves([{ device_id: 2 }] as any); + 
+ const device = await service.deleteDevice(1, 2); + + expect(repoStub).to.have.been.calledOnceWithExactly(1, [2]); + expect(device).to.be.equal(2); + }); + + it('should throw an error if unable to delete device', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'deleteDevicesByIds').resolves([]); + + try { + await service.deleteDevice(1, 2); + expect.fail(); + } catch (err: any) { + expect(err.message).to.equal('Unable to delete device'); + } + + expect(repoStub).to.have.been.calledOnceWithExactly(1, [2]); + }); + }); + + describe('createDevice', () => { + it('should delete a device by its ID', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'createDevice').resolves(true as any); + + const device = await service.createDevice({ + device_make_id: 1, + model: null, + survey_id: 1, + serial: 'serial', + comment: 'comment' + }); + + expect(repoStub).to.have.been.calledOnceWithExactly({ + device_make_id: 1, + model: null, + survey_id: 1, + serial: 'serial', + comment: 'comment' + }); + + expect(device).to.be.equal(true); + }); + }); + + describe('updateDevice', () => { + it('should update a device by its ID', async () => { + const mockConnection = getMockDBConnection(); + const service = new TelemetryDeviceService(mockConnection); + + const repoStub = sinon.stub(TelemetryDeviceRepository.prototype, 'updateDevice').resolves(true as any); + + const device = await service.updateDevice(1, 2, { + device_make_id: 1, + serial: 'serial', + comment: 'comment' + }); + + expect(repoStub).to.have.been.calledOnceWithExactly(1, 2, { + device_make_id: 1, + serial: 'serial', + comment: 'comment' + }); + + expect(device).to.be.equal(true); + }); + }); +}); diff --git 
a/api/src/services/telemetry-services/telemetry-device-service.ts b/api/src/services/telemetry-services/telemetry-device-service.ts new file mode 100644 index 0000000000..68bc7970f1 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-device-service.ts @@ -0,0 +1,179 @@ +import { DeviceRecord } from '../../database-models/device'; +import { IDBConnection } from '../../database/db'; +import { ApiGeneralError } from '../../errors/api-error'; +import { TelemetryDeviceRepository } from '../../repositories/telemetry-repositories/telemetry-device-repository'; +import { + CreateTelemetryDevice, + DeviceAdvancedFilters, + UpdateTelemetryDevice +} from '../../repositories/telemetry-repositories/telemetry-device-repository.interface'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { DBService } from '../db-service'; + +/** + * A service class for working with telemetry devices. + * + * Note: A telemetry `device` is different than a `deployment`. + * A device may have multiple deployments, but a deployment is associated with a single device. + * + * Device: The physical device. + * Deployment: The time period during which a device is attached to an animal. + * + * @export + * @class TelemetryDeviceService + * @extends {DBService} + */ +export class TelemetryDeviceService extends DBService { + telemetryDeviceRepository: TelemetryDeviceRepository; + + constructor(connection: IDBConnection) { + super(connection); + this.telemetryDeviceRepository = new TelemetryDeviceRepository(connection); + } + + /** + * Create a new device record. + * + * @param {CreateTelemetryDevice} device + * @returns {*} {Promise} + */ + async createDevice(device: CreateTelemetryDevice): Promise { + return this.telemetryDeviceRepository.createDevice(device); + } + + /** + * Get a single device by its ID. + * + * @throws {ApiGeneralError} If the device is not found. 
+ * + * @param {number} surveyId + * @param {number} deviceId + * @return {*} {Promise} + */ + async getDevice(surveyId: number, deviceId: number): Promise { + const devices = await this.telemetryDeviceRepository.getDevicesByIds(surveyId, [deviceId]); + + if (devices.length !== 1) { + throw new ApiGeneralError('Device not found', ['TelemetryDeviceService -> getDevice']); + } + + return devices[0]; + } + + /** + * Finds a device by a given serial number and make in the given survey + * + * @param {number} surveyId + * @param {number} serial + * @param {number} deviceMakeId + * @return {*} {Promise} + */ + async findDeviceBySerial(surveyId: number, serial: number, deviceMakeId: number): Promise { + return this.telemetryDeviceRepository.findDeviceBySerial(surveyId, serial, deviceMakeId); + } + + /** + * Get a list of devices by their IDs. + * + * @param {number} surveyId + * @param {number[]} deviceIds + * @returns {*} {Promise} + * + */ + async getDevices(surveyId: number, deviceIds: number[]): Promise { + return this.telemetryDeviceRepository.getDevicesByIds(surveyId, deviceIds); + } + + /** + * Get all devices for a survey, based on pagination options. + * + * @param {number} surveyId + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryDeviceService + */ + async getDevicesForSurvey(surveyId: number, pagination?: ApiPaginationOptions): Promise { + return this.telemetryDeviceRepository.getDevicesForSurvey(surveyId, pagination); + } + + /** + * Find devices. + * + * @param {boolean} isUserAdmin Whether the user is an admin. + * @param {(number | null)} systemUserId The user's ID. + * @param {DeviceAdvancedFilters} filterFields The filter fields to apply. + * @param {ApiPaginationOptions} [pagination] The pagination options. 
+ * @return {*} {Promise} + * @memberof TelemetryDeviceService + * + */ + async findDevices( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: DeviceAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + return this.telemetryDeviceRepository.findDevices(isUserAdmin, systemUserId, filterFields, pagination); + } + + /** + * Get the total count of all devices for a survey. + * + * @param {number} surveyId + * @return {*} {Promise} + * @memberof TelemetryDeviceService + */ + async getDevicesCount(surveyId: number): Promise { + return this.telemetryDeviceRepository.getDevicesCount(surveyId); + } + + /** + * Update an existing device record. + * + * @param {number} surveyId + * @param {number} deviceId + * @param {UpdateTelemetryDevice} device + * @returns {*} {Promise} + */ + async updateDevice(surveyId: number, deviceId: number, device: UpdateTelemetryDevice): Promise { + return this.telemetryDeviceRepository.updateDevice(surveyId, deviceId, device); + } + + /** + * Delete a single device by its ID. + * + * @throws {ApiGeneralError} If unable to delete the device. + * + * @param {number} surveyId + * @param {number} deviceId + * @return {*} {Promise} The device ID that was deleted. + */ + async deleteDevice(surveyId: number, deviceId: number): Promise { + const devices = await this.telemetryDeviceRepository.deleteDevicesByIds(surveyId, [deviceId]); + + if (devices.length !== 1 || devices[0].device_id !== deviceId) { + throw new ApiGeneralError('Unable to delete device', ['TelemetryDeviceService -> deleteDevice']); + } + + return devices[0].device_id; + } + + /** + * Deletes one or more devices by ID. 
+ * + * @param {number} surveyId + * @param {number[]} deviceIds + * @return {*} {Promise} + * @memberof TelemetryDeviceService + */ + async deleteDevices(surveyId: number, deviceIds: number[]): Promise { + const devices = await this.telemetryDeviceRepository.deleteDevicesByIds(surveyId, deviceIds); + + if (devices.length !== deviceIds.length) { + throw new ApiGeneralError('Unable to delete devices', [ + 'TelemetryDeviceService -> deleteDevices', + `Expected ${deviceIds.length} devices to be deleted, but only ${devices.length} were deleted.` + ]); + } + } +} diff --git a/api/src/services/telemetry-services/telemetry-lotek-service.interface.ts b/api/src/services/telemetry-services/telemetry-lotek-service.interface.ts new file mode 100644 index 0000000000..13fe2dcadd --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-lotek-service.interface.ts @@ -0,0 +1,7 @@ +//Interface for a Lotek API Device. Raw API response. +export interface LotekAPIDevice { + nDeviceID: number; + strSpecialID: string; + dtCreated: string; + strSatellite: string; +} diff --git a/api/src/services/telemetry-services/telemetry-lotek-service.test.ts b/api/src/services/telemetry-services/telemetry-lotek-service.test.ts new file mode 100644 index 0000000000..bd787f0a70 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-lotek-service.test.ts @@ -0,0 +1,277 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as env from '../../utils/env-config'; +import { getMockDBConnection } from '../../__mocks__/db'; +import { TelemetryLotekService } from './telemetry-lotek-service'; + +chai.use(sinonChai); + +describe('TelemetryLotekService', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('lotekCient', () => { + it('should create a new Axios client with the correct base URL', () => { + sinon.stub(env, 'getEnvironmentVariable').returns('https://webservice.lotek.com'); + + const service = new 
TelemetryLotekService(getMockDBConnection()); + + expect(service.lotekClient.defaults.baseURL).to.equal('https://webservice.lotek.com/API'); + }); + }); + + describe('fetchTokenFromLotek', () => { + it('should return the cached token if exists', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + const axiosStub = sinon.stub(service.lotekClient, 'post'); + + service.token = 'CACHED_TOKEN'; + + const token = await service.fetchTokenFromLotek(); + + expect(axiosStub).to.not.have.been.called; + expect(token).to.equal('CACHED_TOKEN'); + }); + + it('should fetch a new token from the Lotek API', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + + const getEnvStub = sinon.stub(env, 'getEnvironmentVariable'); + + getEnvStub.onCall(0).returns('test-username'); + getEnvStub.onCall(1).returns('test-password'); + + const axiosStub = sinon.stub(service.lotekClient, 'post').resolves({ data: { access_token: 'NEW_TOKEN' } }); + + expect(service.token).to.be.undefined; + + const token = await service.fetchTokenFromLotek(); + + expect(axiosStub).to.have.been.calledOnceWithExactly( + '/user/login', + { + username: 'test-username', + password: 'test-password', + grant_type: 'password' + }, + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded' + } + } + ); + + expect(token).to.equal('NEW_TOKEN'); + expect(service.token).to.equal('NEW_TOKEN'); + }); + }); + + describe('fetchDevicesFromLotek', () => { + it('should fetch devices from the Lotek API', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + service.token = 'TEST_TOKEN'; + + const axiosStub = sinon.stub(service.lotekClient, 'get').resolves({ data: [{ serial: 1 }] }); + + const devices = await service.fetchDevicesFromLotek(); + + expect(axiosStub).to.have.been.calledOnceWithExactly('/devices', { + headers: { + Authorization: `Bearer TEST_TOKEN` + } + }); + + expect(devices).to.deep.equal([{ serial: 1 }]); + }); + }); 
+ + describe('fetchTelemetryCountFromLotek', () => { + it('should fetch telemetry count from the Lotek API', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + service.token = 'TEST_TOKEN'; + + const axiosStub = sinon.stub(service.lotekClient, 'get').resolves({ data: 'Number of positions: 10' }); + + const count = await service.fetchTelemetryCountFromLotek({ + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: '2021-01-01' + }); + + expect(axiosStub).to.have.been.calledOnceWithExactly('/gps/count', { + params: { + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: '2021-01-01' + }, + headers: { + Authorization: `Bearer TEST_TOKEN` + } + }); + + expect(count).to.equal(10); + }); + + it('should convert all numbers correctly from response string', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + service.token = 'TEST_TOKEN'; + + const axiosStub = sinon.stub(service.lotekClient, 'get'); + + axiosStub.onCall(0).resolves({ data: 'Number of positions: 10' }); + const count = await service.fetchTelemetryCountFromLotek({ + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: '2021-01-01' + }); + expect(count).to.equal(10); + + axiosStub.onCall(1).resolves({ data: 'Number of positions: 0' }); + const count1 = await service.fetchTelemetryCountFromLotek({ + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: '2021-01-01' + }); + expect(count1).to.equal(0); + + try { + axiosStub.onCall(2).resolves({ data: 'Number of positions: ' }); + await service.fetchTelemetryCountFromLotek({ + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: '2021-01-01' + }); + expect.fail(); + } catch (err: any) { + expect(err.message).to.equal('Failed to fetch device telemetry count from Lotek.'); + } + }); + }); + + describe('getDevicesActivitiesMap', () => { + it('should return a map of device activities', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + const repoStub = sinon + .stub(service.telemetryLotekRepository, 
'getDeviceActivityStatistics') + .resolves([{ serial: 1, telemetry_count: 2, last_acquisition: '2021-01-01' }]); + + const activity = await service.getDevicesActivitiesMap(); + + expect(repoStub).to.have.been.calledOnceWithExactly(); + expect(activity).to.deep.equal(new Map([[1, { telemetryCount: 2, lastAcquisition: '2021-01-01' }]])); + }); + }); + + describe('batchCreateTelemetry', () => { + it('should batch insert telemetry data', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + const telemetry = [{ id: 1 }, { id: 2 }]; + + const repoStub = sinon.stub(service.telemetryLotekRepository, 'createLotekTelemetry').resolves(1); + + const inserted = await service.batchCreateTelemetry(telemetry as any, 1); + + expect(repoStub.getCall(0)).to.have.been.calledWithExactly([{ id: 1 }]); + expect(repoStub.getCall(1)).to.have.been.calledWithExactly([{ id: 2 }]); + expect(repoStub).to.have.been.calledTwice; + expect(inserted).to.deep.equal(2); + }); + }); + + describe('processTelemetry', () => { + it('should not fetch telemetry when Lotek count is equal to SIMS count', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + const tasks = [{ serial: 1 }, { serial: 2 }]; + + const activityStub = sinon.stub(service, 'getDevicesActivitiesMap'); + const countStub = sinon.stub(service, 'fetchTelemetryCountFromLotek'); + const telemetryStub = sinon.stub(service, 'fetchTelemetryFromLotek'); + + activityStub.resolves( + new Map([ + [1, { telemetryCount: 2, lastAcquisition: '2021-01-01' }], + [2, { telemetryCount: 2, lastAcquisition: '2021-01-01' }] + ]) + ); + countStub.resolves(2); + + const results = await service.processTelemetry(tasks, { concurrently: 1, batchSize: 1 }); + + expect(activityStub).to.have.been.calledOnce; + + expect(countStub.getCall(0)).to.have.been.calledWithExactly({ + deviceId: 1 + }); + + expect(countStub.getCall(1)).to.have.been.calledWithExactly({ + deviceId: 2 + }); + + 
expect(telemetryStub).not.to.have.been.called; + expect(results).to.deep.equal([ + { + task: { serial: 1 }, + value: { new: 0, created: 0 } + }, + { + task: { serial: 2 }, + value: { new: 0, created: 0 } + } + ]); + }); + + it('should fetch telemetry when Lotek count is more than SIMS count', async () => { + const service = new TelemetryLotekService(getMockDBConnection()); + const tasks = [{ serial: 1 }, { serial: 2 }]; + + const activityStub = sinon.stub(service, 'getDevicesActivitiesMap'); + const countStub = sinon.stub(service, 'fetchTelemetryCountFromLotek'); + const telemetryStub = sinon.stub(service, 'fetchTelemetryFromLotek'); + const createStub = sinon.stub(service, 'batchCreateTelemetry'); + + activityStub.resolves( + new Map([ + [1, { telemetryCount: 2, lastAcquisition: '2021-01-01' }], + [2, { telemetryCount: 2, lastAcquisition: '2021-01-01' }] + ]) + ); + + countStub.resolves(3); + createStub.resolves(1); + + const results = await service.processTelemetry(tasks, { concurrently: 1, batchSize: 1 }); + + expect(activityStub).to.have.been.calledOnce; + + expect(countStub.getCall(0)).to.have.been.calledWithExactly({ deviceId: 1 }); + expect(countStub.getCall(1)).to.have.been.calledWithExactly({ deviceId: 2 }); + + expect(telemetryStub.getCall(0)).to.have.been.calledWithExactly({ + deviceId: 1, + dtStart: '2021-01-01', + dtEnd: undefined + }); + + expect(telemetryStub.getCall(1)).to.have.been.calledWithExactly({ + deviceId: 2, + dtStart: '2021-01-01', + dtEnd: undefined + }); + + expect(results).to.deep.equal([ + { + task: { serial: 1 }, + value: { new: 1, created: 1 } + }, + { + task: { serial: 2 }, + value: { new: 1, created: 1 } + } + ]); + + expect(createStub).to.have.been.calledTwice; + }); + }); +}); diff --git a/api/src/services/telemetry-services/telemetry-lotek-service.ts b/api/src/services/telemetry-services/telemetry-lotek-service.ts new file mode 100644 index 0000000000..4cf6a48252 --- /dev/null +++ 
b/api/src/services/telemetry-services/telemetry-lotek-service.ts @@ -0,0 +1,250 @@ +import axios, { AxiosInstance } from 'axios'; +import { chunk } from 'lodash'; +import qs from 'qs'; +import { IDBConnection } from '../../database/db'; +import { ApiGeneralError } from '../../errors/api-error'; +import { formatAxiosError } from '../../errors/axios-error'; +import { TelemetryLotekRepository } from '../../repositories/telemetry-repositories/telemetry-lotek-repository'; +import { + LotekAPIQuery, + LotekPayload, + LotekTask +} from '../../repositories/telemetry-repositories/telemetry-lotek-repository.interface'; +import { getEnvironmentVariable } from '../../utils/env-config'; +import { getLogger } from '../../utils/logger'; +import { QueueResult, taskQueue } from '../../utils/task-queue'; +import { DBService } from '../db-service'; +import { LotekAPIDevice } from './telemetry-lotek-service.interface'; +import { keysToLowerCase } from './telemetry-utils'; +import { TelemetryProcessingOptions, TelemetryProcessingResult } from './telemetry.interface'; + +const defaultLog = getLogger('telemetry-lotek-service'); + +/** + * This service is responsible for fetching telemetry data from the Lotek API and storing it in SIMS. + * + * @see https://webservice.lotek.com/API/Help + * + * @export + * @class TelemetryVendorService + * @extends {DBService} + */ +export class TelemetryLotekService extends DBService { + lotekClient: AxiosInstance; + + token: string | undefined; + + telemetryLotekRepository: TelemetryLotekRepository; + + /** + * Creates an instance of TelemetryLotekService. 
+ * + * @param {IDBConnection} connection + */ + constructor(connection: IDBConnection) { + super(connection); + + this.lotekClient = axios.create({ + paramsSerializer: (params) => qs.stringify(params), + baseURL: `${getEnvironmentVariable('LOTEK_API_HOST')}/API` + }); + + this.token = undefined; + + this.telemetryLotekRepository = new TelemetryLotekRepository(connection); + } + + /** + * Authenticate Lotek API account and return access token. + * + * @throws {ApiGeneralError} Failed to authenticate with Lotek API + * @returns {Promise} The access token + */ + async fetchTokenFromLotek(): Promise { + // Return cached token if already authenticated + if (this.token) { + return this.token; + } + + try { + const response = await this.lotekClient.post( + `/user/login`, + { + username: getEnvironmentVariable('LOTEK_ACCOUNT_USERNAME'), + password: getEnvironmentVariable('LOTEK_ACCOUNT_PASSWORD'), + grant_type: 'password' + }, + { headers: { 'Content-Type': 'application/x-www-form-urlencoded' } } + ); + + // Cache token for future requests + this.token = response.data.access_token; + + return response.data.access_token; + } catch (error) { + throw new ApiGeneralError('Failed to authenticate with Lotek.', [formatAxiosError(error)]); + } + } + + /** + * Fetch devices associated with the authenticated Lotek API account. + * + * @throws {ApiGeneralError} Failed to fetch devices from Lotek API + * @returns {Promise} The list of devices + */ + async fetchDevicesFromLotek(): Promise { + const token = await this.fetchTokenFromLotek(); + try { + const response = await this.lotekClient.get(`/devices`, { + headers: { + Authorization: `Bearer ${token}` + } + }); + + return response.data; + } catch (error) { + throw new ApiGeneralError('Failed to fetch devices from Lotek.', [formatAxiosError(error)]); + } + } + + /** + * Fetch telemetry data count for a single device from Lotek API. 
+ * + * @throws {ApiGeneralError} Failed to fetch device telemetry count from Lotek API + * @param {LotekAPIQuery} query - Lotek API request query + * @returns {Promise} The number of telemetry records + */ + async fetchTelemetryCountFromLotek(query: LotekAPIQuery): Promise { + try { + const token = await this.fetchTokenFromLotek(); + const response = await this.lotekClient.get(`/gps/count`, { + params: { + deviceId: query.deviceId, + dtStart: query.dtStart, + dtEnd: query.dtEnd + }, + headers: { + Authorization: `Bearer ${token}` + } + }); + + // response.data = 'Number of Positions: 10' + const count = response.data.replace(/\D/g, ''); // ie: '10' + + if (!count || isNaN(Number(count))) { + throw new ApiGeneralError(`Failed to parse count from Lotek response`, [response.data]); + } + + return Number(count); + } catch (error) { + throw new ApiGeneralError('Failed to fetch device telemetry count from Lotek.', [formatAxiosError(error)]); + } + } + + /** + * Fetch telemetry data for a single device from Lotek API. + * + * @param {LotekAPIQuery} query - Lotek API request query + * @returns {Promise} Raw API telemetry data + */ + async fetchTelemetryFromLotek(query: LotekAPIQuery): Promise { + try { + const token = await this.fetchTokenFromLotek(); + // Note: Lotek is using SentenceCased keys in their API response + const response = await this.lotekClient.get(`/gps`, { + params: { + deviceId: query.deviceId, + dtStart: query.dtStart, + dtEnd: query.dtEnd + }, + headers: { + Authorization: `Bearer ${token}` + } + }); + + return response.data.map((record) => keysToLowerCase(record)); + } catch (error) { + throw new ApiGeneralError('Failed to fetch device telemetry from Lotek.', [formatAxiosError(error)]); + } + } + + /** + * Get a map of device serials to their telemetry activity statistics. 
+ * + * @returns {Promise} The device activity map + */ + async getDevicesActivitiesMap(): Promise> { + const deviceActivityStats = await this.telemetryLotekRepository.getDeviceActivityStatistics(); + return new Map( + deviceActivityStats.map((value) => [ + value.serial, + { telemetryCount: value.telemetry_count, lastAcquisition: value.last_acquisition } + ]) + ); + } + + /** + * Batch insert telemetry data into SIMS. + * + * @param {LotekPayload[]} telemetry - List of telemetry data to create + * @param {number} [batchSize=1000] - Number of items to insert in a single batch + * @returns {Promise} The number of telemetry records created + */ + async batchCreateTelemetry(telemetry: LotekPayload[], batchSize = 1000): Promise { + const telemetryBatches = chunk(telemetry, batchSize); + + const rowCounts = await Promise.all( + telemetryBatches.map((batch) => this.telemetryLotekRepository.createLotekTelemetry(batch)) + ); + + return rowCounts.reduce((acc, count) => acc + count, 0); + } + + /** + * Process (fetch and insert) telemetry data for a list of Lotek tasks. + * + * @param {LotekTask[]} tasks - List of Lotek tasks to process + * @param {TelemetryProcessingOptions} options - Telemetry processing options + * @returns {Promise[]>} The telemetry processing results + */ + async processTelemetry( + tasks: LotekTask[], + options: TelemetryProcessingOptions + ): Promise[]> { + const activityMap = await this.getDevicesActivitiesMap(); + + return taskQueue( + tasks, + async (task: LotekTask) => { + // Track the telemetry processing state + const telemetry = { total: 0, new: 0, created: 0 }; + + // Fetch the total number of device telemetry records from Lotek API + telemetry.total = await this.fetchTelemetryCountFromLotek({ deviceId: task.serial }); + + // Get the device activity statistics from SIMS + const deviceActivity = activityMap.get(task.serial) ?? 
{ telemetryCount: 0, lastAcquisition: null }; + + // Calculate the number of new telemetry records ie: telemetry records that are not in SIMS + telemetry.new = telemetry.total - deviceActivity?.telemetryCount; + + if (telemetry.new) { + // Fetch telemetry data from Lotek API + const lotekAPITelemetry = await this.fetchTelemetryFromLotek({ + deviceId: task.serial, + dtEnd: options.endDate, + // If no start date is provided, use the last acquisition date from SIMS + dtStart: options.startDate ?? deviceActivity?.lastAcquisition ?? undefined + }); + + // Batch insert telemetry data into SIMS + telemetry.created = await this.batchCreateTelemetry(lotekAPITelemetry, options.batchSize); + } + + defaultLog.info({ label: 'processTelemetry', ...telemetry }); + return { new: telemetry.new, created: telemetry.created }; + }, + options.concurrently + ); + } +} diff --git a/api/src/services/telemetry-services/telemetry-utils.ts b/api/src/services/telemetry-services/telemetry-utils.ts new file mode 100644 index 0000000000..21b49eb48b --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-utils.ts @@ -0,0 +1,43 @@ +interface IDeviceKey { + /** + * Device vendor / manufacturer. + * + * @example 'lotek' + * @type {string} + */ + vendor: string; + /** + * Device serial identifier. + * + * @example 'a123' || 12345 + * @type {string | number} + */ + serial: string | number; +} + +/** + * Generate a device key from a telemetry vendor and device serial. + * + * Note: In the database this value is used as psuedo foreign key from `telemetry` to `device`. + * + * @example 'lotek:1234' + * + * @param {{vendor: string; serial: string}} params - Vendor and serial + * @returns {string} + */ +export const getTelemetryDeviceKey = ({ vendor, serial }: IDeviceKey): string => { + return `${vendor.trim().toLowerCase()}:${String(serial).trim().toLowerCase()}`; +}; + +/** + * Convert an object's keys to lowercase. 
+ * + * @param {Record} obj - Object to convert + * @returns {Record} - Object with lowercase keys + */ +export const keysToLowerCase = (obj: Record): T => { + return Object.keys(obj).reduce((acc, key) => { + acc[key.toLowerCase()] = obj[key]; + return acc; + }, {} as T); +}; diff --git a/api/src/services/telemetry-services/telemetry-vectronic-service.interface.ts b/api/src/services/telemetry-services/telemetry-vectronic-service.interface.ts new file mode 100644 index 0000000000..7dceb7d709 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-vectronic-service.interface.ts @@ -0,0 +1,53 @@ +/** + * Raw Vectronic API telemetry record. + * + * @see https://api.vectronic-wildlife.com/swagger-ui/index.html?configUrl=/v3/api-docs/swagger-config#//getPositions_2 + */ +export interface TelemetryVectronicAPIRecord { + idPosition: number; + idCollar: number; + acquisitionTime: string | null; + scts: string | null; + originCode: string | null; + ecefX: number | null; + ecefY: number | null; + ecefZ: number | null; + latitude: number | null; + longitude: number | null; + height: number | null; + dop: number | null; + idFixType: number | null; + positionError: number | null; + satCount: number | null; + ch01SatId: number | null; + ch01SatCnr: number | null; + ch02SatId: number | null; + ch02SatCnr: number | null; + ch03SatId: number | null; + ch03SatCnr: number | null; + ch04SatId: number | null; + ch04SatCnr: number | null; + ch05SatId: number | null; + ch05SatCnr: number | null; + ch06SatId: number | null; + ch06SatCnr: number | null; + ch07SatId: number | null; + ch07SatCnr: number | null; + ch08SatId: number | null; + ch08SatCnr: number | null; + ch09SatId: number | null; + ch09SatCnr: number | null; + ch10SatId: number | null; + ch10SatCnr: number | null; + ch11SatId: number | null; + ch11SatCnr: number | null; + ch12SatId: number | null; + ch12SatCnr: number | null; + idMortalityStatus: number; + activity: number | null; + mainVoltage: number; + 
backupVoltage: number; + temperature: number; + transformedX: number | null; + transformedY: number | null; +} diff --git a/api/src/services/telemetry-services/telemetry-vectronic-service.test.ts b/api/src/services/telemetry-services/telemetry-vectronic-service.test.ts new file mode 100644 index 0000000000..c2b55baae1 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-vectronic-service.test.ts @@ -0,0 +1,223 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as env from '../../utils/env-config'; +import { getMockDBConnection } from '../../__mocks__/db'; +import { TelemetryVectronicService } from './telemetry-vectronic-service'; + +chai.use(sinonChai); + +describe('TelemetryVectronicService', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('vectronicClient', () => { + it('should create a new Axios client with the correct base URL', () => { + sinon.stub(env, 'getEnvironmentVariable').returns('https://api.vectronic-wildlife.com/v2'); + + const service = new TelemetryVectronicService(getMockDBConnection()); + + expect(service.vectronicClient.defaults.baseURL).to.equal('https://api.vectronic-wildlife.com/v2'); + }); + }); + + describe('fetchTelemetryFromVectronic', () => { + it('should fetch telemetry data from the Vectronic API', async () => { + const mockAxiosResponse = { + data: [ + { + KEY_A: 'valueA' + } + ] + }; + + const service = new TelemetryVectronicService(getMockDBConnection()); + const axiosStub = sinon.stub(service.vectronicClient, 'get').resolves(mockAxiosResponse); + + const query = { + idcollar: 1, + collarkey: 'test-collar-key', + beforeAcquisition: '2021-01-01', + afterAcquisition: '2021-01-01', + gtId: 1 + }; + + const telemetry = await service.fetchTelemetryFromVectronic(query); + + expect(axiosStub).to.have.been.calledOnceWithExactly('/collar/1/gps', { + params: { + collarkey: 'test-collar-key', + beforeAcquisition: '2021-01-01', + afterAcquisition: 
'2021-01-01', + ['gt-id']: 1 + } + }); + + expect(telemetry).to.deep.equal([{ key_a: 'valueA' }]); + }); + }); + + describe('fetchTelemetryCountFromVectronic', () => { + it('should fetch telemetry count from the Vectronic API', async () => { + const mockAxiosResponse = { + data: 1 + }; + + const service = new TelemetryVectronicService(getMockDBConnection()); + const axiosStub = sinon.stub(service.vectronicClient, 'get').resolves(mockAxiosResponse); + + const query = { + idcollar: 1, + collarkey: 'test-collar-key', + beforeAcquisition: '2021-01-01', + afterAcquisition: '2021-01-01', + gtId: 1 + }; + + const telemetry = await service.fetchTelemetryCountFromVectronic(query); + + expect(axiosStub).to.have.been.calledOnceWithExactly('/collar/1/gps/count', { + params: { + collarkey: 'test-collar-key', + beforeAcquisition: '2021-01-01', + afterAcquisition: '2021-01-01', + ['gt-id']: 1 + } + }); + + expect(telemetry).to.equal(1); + }); + }); + + describe('getDeviceCredentials', () => { + it('should fetch Vectronic device credentials', async () => { + const service = new TelemetryVectronicService(getMockDBConnection()); + const repoStub = sinon + .stub(service.telemetryVectronicRepository, 'getAllVectronicCredentials') + .resolves([true] as any); + + const credentials = await service.getDeviceCredentials(); + + expect(repoStub).to.have.been.calledOnceWithExactly(); + expect(credentials).to.deep.equal([true]); + }); + }); + + describe('batchCreateTelemetry', () => { + it('should batch insert telemetry data', async () => { + const service = new TelemetryVectronicService(getMockDBConnection()); + const telemetry = [{ id: 1 }, { id: 2 }]; + + const repoStub = sinon.stub(service.telemetryVectronicRepository, 'createVectronicTelemetry').resolves(1); + + const inserted = await service.batchCreateTelemetry(telemetry as any, 1); + + expect(repoStub.getCall(0)).to.have.been.calledWithExactly([{ id: 1 }]); + expect(repoStub.getCall(1)).to.have.been.calledWithExactly([{ id: 2 }]); + 
expect(repoStub).to.have.been.calledTwice; + expect(inserted).to.deep.equal(2); + }); + }); + + describe('getDevicesActivityMap', () => { + it('should return a map of device activity', async () => { + const service = new TelemetryVectronicService(getMockDBConnection()); + const repoStub = sinon + .stub(service.telemetryVectronicRepository, 'getDeviceActivityStatistics') + .resolves([{ serial: 1, telemetry_count: 2, max_idposition: 2 }] as any); + + const activity = await service.getDevicesActivitiesMap(); + + expect(repoStub).to.have.been.calledOnceWithExactly(); + expect(activity).to.deep.equal(new Map([[1, { telemetryCount: 2, maxIdposition: 2 }]])); + }); + }); + + describe('processTelemetry', () => { + it('should not fetch telemetry when Vectronic count is equal to SIMS count', async () => { + const service = new TelemetryVectronicService(getMockDBConnection()); + const tasks = [ + { serial: 1, key: 'key1' }, + { serial: 2, key: 'key2' } + ]; + + const activityStub = sinon.stub(service, 'getDevicesActivitiesMap'); + const countStub = sinon.stub(service, 'fetchTelemetryCountFromVectronic'); + const telemetryStub = sinon.stub(service, 'fetchTelemetryFromVectronic'); + + activityStub.resolves( + new Map([ + [1, { telemetryCount: 2, maxIdposition: 2 }], + [2, { telemetryCount: 2, maxIdposition: 2 }] + ]) + ); + countStub.resolves(2); + + const results = await service.processTelemetry(tasks, { concurrently: 1, batchSize: 1 }); + + expect(activityStub).to.have.been.calledOnce; + + expect(countStub.getCall(0)).to.have.been.calledWithExactly({ idcollar: 1, collarkey: 'key1' }); + expect(countStub.getCall(1)).to.have.been.calledWithExactly({ idcollar: 2, collarkey: 'key2' }); + + expect(telemetryStub).not.to.have.been.called; + expect(results).to.deep.equal([ + { + task: { serial: 1, key: 'key1' }, + value: { new: 0, created: 0 } + }, + { + task: { serial: 2, key: 'key2' }, + value: { new: 0, created: 0 } + } + ]); + }); + + it('should fetch telemetry when Vectronic 
count is more than SIMS count', async () => { + const service = new TelemetryVectronicService(getMockDBConnection()); + const tasks = [ + { serial: 1, key: 'key1' }, + { serial: 2, key: 'key2' } + ]; + + const activityStub = sinon.stub(service, 'getDevicesActivitiesMap'); + const countStub = sinon.stub(service, 'fetchTelemetryCountFromVectronic'); + const telemetryStub = sinon.stub(service, 'fetchTelemetryFromVectronic'); + const createStub = sinon.stub(service, 'batchCreateTelemetry'); + + activityStub.resolves( + new Map([ + [1, { telemetryCount: 2, maxIdposition: 3 }], + [2, { telemetryCount: 2, maxIdposition: 3 }] + ]) + ); + countStub.resolves(3); + + await service.processTelemetry(tasks, { concurrently: 1, batchSize: 1 }); + + expect(activityStub).to.have.been.calledOnce; + + expect(countStub.getCall(0)).to.have.been.calledWithExactly({ idcollar: 1, collarkey: 'key1' }); + expect(countStub.getCall(1)).to.have.been.calledWithExactly({ idcollar: 2, collarkey: 'key2' }); + + expect(telemetryStub.getCall(0)).to.have.been.calledWithExactly({ + idcollar: 1, + collarkey: 'key1', + afterAcquisition: undefined, + beforeAcquisition: undefined, + gtId: 3 + }); + + expect(telemetryStub.getCall(1)).to.have.been.calledWithExactly({ + idcollar: 2, + collarkey: 'key2', + afterAcquisition: undefined, + beforeAcquisition: undefined, + gtId: 3 + }); + + expect(createStub).to.have.been.calledTwice; + }); + }); +}); diff --git a/api/src/services/telemetry-services/telemetry-vectronic-service.ts b/api/src/services/telemetry-services/telemetry-vectronic-service.ts new file mode 100644 index 0000000000..992660d69b --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-vectronic-service.ts @@ -0,0 +1,193 @@ +import axios, { AxiosInstance } from 'axios'; +import { chunk } from 'lodash'; +import qs from 'qs'; +import { TelemetryCredentialVectronicRecord } from '../../database-models/telemetry_credential_vectronic'; +import { IDBConnection } from '../../database/db'; +import 
{ ApiGeneralError } from '../../errors/api-error'; +import { formatAxiosError } from '../../errors/axios-error'; +import { TelemetryVectronicRepository } from '../../repositories/telemetry-repositories/telemetry-vectronic-repository'; +import { + VectronicAPIQuery, + VectronicPayload, + VectronicTask +} from '../../repositories/telemetry-repositories/telemetry-vectronic-repository.interface'; +import { getEnvironmentVariable } from '../../utils/env-config'; +import { getLogger } from '../../utils/logger'; +import { QueueResult, taskQueue } from '../../utils/task-queue'; +import { DBService } from '../db-service'; +import { keysToLowerCase } from './telemetry-utils'; +import { TelemetryProcessingOptions, TelemetryProcessingResult } from './telemetry.interface'; +const defaultLog = getLogger('telemetry-vectronic-service'); + +/** + * This service is responsible for fetching telemetry data from the Vectronic API and storing it in SIMS. + * + * @see https://api.vectronic-wildlife.com/swagger-ui/index.html?configUrl=/v3/api-docs/swagger-config + * + * @export + * @class TelemetryVendorService + * @extends {DBService} + */ +export class TelemetryVectronicService extends DBService { + vectronicClient: AxiosInstance; + + telemetryVectronicRepository: TelemetryVectronicRepository; + + /** + * Creates an instance of TelemetryVectronicService. + * + * @param {IDBConnection} connection + */ + constructor(connection: IDBConnection) { + super(connection); + + this.vectronicClient = axios.create({ + paramsSerializer: (params) => qs.stringify(params), + baseURL: getEnvironmentVariable('VECTRONIC_API_HOST') + }); + + this.telemetryVectronicRepository = new TelemetryVectronicRepository(connection); + } + + /** + * Fetch vectronic device telemetry data from the Vectronic API. 
+ * + * @param {VectronicAPIQuery} query - Vectronic API request query + * @returns {Promise} + */ + async fetchTelemetryFromVectronic(query: VectronicAPIQuery): Promise { + try { + // Note: Vectronic is using SentenceCased keys in their API response + const response = await this.vectronicClient.get(`/collar/${query.idcollar}/gps`, { + params: { + collarkey: query.collarkey, + beforeAcquisition: query.beforeAcquisition, + afterAcquisition: query.afterAcquisition, + ['gt-id']: query.gtId + } + }); + + return response.data.map((record) => keysToLowerCase(record)); + } catch (error) { + throw new ApiGeneralError('Failed to fetch devices from Vectronic.', [formatAxiosError(error)]); + } + } + + /** + * Fetch vectronic device telemetry count from the Vectronic API. + * + * @param {VectronicAPIQuery} query - Vectronic API request query + * @returns {*} {Promise} + */ + async fetchTelemetryCountFromVectronic(query: VectronicAPIQuery): Promise { + try { + const response = await this.vectronicClient.get(`/collar/${query.idcollar}/gps/count`, { + params: { + collarkey: query.collarkey, + beforeAcquisition: query.beforeAcquisition, + afterAcquisition: query.afterAcquisition, + ['gt-id']: query.gtId + } + }); + + return response.data; + } catch (error) { + throw new ApiGeneralError('Failed to fetch device count from Vectronic.', [formatAxiosError(error)]); + } + } + + /** + * Get all Vectronic credentials from SIMS. + * + * @returns {*} {Promise} + */ + async getDeviceCredentials(): Promise { + return this.telemetryVectronicRepository.getAllVectronicCredentials(); + } + + /** + * Create Vectronic telemetry records in SIMS in batches. 
+ * + * @param {VectronicPayload} telemetry - Vectronic telemetry records + * @returns {Promise} + */ + async batchCreateTelemetry(telemetry: VectronicPayload[], batchSize = 1000): Promise { + const telemetryBatches = chunk(telemetry, batchSize); + + // Insert telemetry data in batches + const rowCounts = await Promise.all( + telemetryBatches.map((batch) => this.telemetryVectronicRepository.createVectronicTelemetry(batch)) + ); + + // Return the total number of inserted rows + return rowCounts.reduce((acc, count) => acc + count, 0); + } + + /** + * Get a map of device serials to their telemetry activity statistics. + * + * @returns {*} {Promise} The device activity map + */ + async getDevicesActivitiesMap(): Promise> { + const deviceActivityStats = await this.telemetryVectronicRepository.getDeviceActivityStatistics(); + return new Map( + deviceActivityStats.map((value) => [ + value.serial, + { telemetryCount: value.telemetry_count, maxIdposition: value.max_idposition } + ]) + ); + } + + /** + * Process (fetch and insert) telemetry data for a list of Vectronic tasks. + * + * @param {VectronicTask[]} tasks - List of Vectronic tasks + * @param {TelemetryProcessingOptions} options - Telemetry processing options + * @returns {*} {Promise[]>} + */ + async processTelemetry( + tasks: VectronicTask[], + options: TelemetryProcessingOptions + ): Promise[]> { + const activityMap = await this.getDevicesActivitiesMap(); + + return taskQueue( + tasks, + async (task: VectronicTask) => { + // Track the telemetry processing state + const telemetry = { total: 0, new: 0, created: 0 }; + + // Fetch the total number of device telemetry records from Vectronic API + telemetry.total = await this.fetchTelemetryCountFromVectronic({ + idcollar: task.serial, + collarkey: task.key + }); + + // Get the device activity statistics from SIMS + const deviceActivity = activityMap.get(task.serial) ?? 
{ telemetryCount: 0, maxIdposition: null }; + + // Calculate the number of new telemetry records ie: telemetry records that are not in SIMS + telemetry.new = telemetry.total - deviceActivity.telemetryCount; + + if (telemetry.new) { + // Fetch telemetry data from Vectronic API + const vectronicTelemetry = await this.fetchTelemetryFromVectronic({ + idcollar: task.serial, + collarkey: task.key, + afterAcquisition: options.startDate, + beforeAcquisition: options.endDate, + // If no start date provided, use the largest idposition from SIMS + gtId: options.startDate ? undefined : deviceActivity.maxIdposition ?? undefined + }); + + // Batch insert telemetry data into SIMS + telemetry.created = await this.batchCreateTelemetry(vectronicTelemetry, options.batchSize); + } + + defaultLog.info({ label: 'processTelemetry', ...telemetry }); + return { new: telemetry.new, created: telemetry.created }; + }, + options.concurrently + ); + } +} diff --git a/api/src/services/telemetry-services/telemetry-vendor-service.test.ts b/api/src/services/telemetry-services/telemetry-vendor-service.test.ts new file mode 100644 index 0000000000..82f23da3fd --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-vendor-service.test.ts @@ -0,0 +1,459 @@ +import chai, { expect } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { ExtendedDeploymentRecord } from '../../repositories/telemetry-repositories/telemetry-deployment-repository.interface'; +import { TelemetryManualRepository } from '../../repositories/telemetry-repositories/telemetry-manual-repository'; +import { TelemetryVendorRepository } from '../../repositories/telemetry-repositories/telemetry-vendor-repository'; +import { + Telemetry, + TelemetrySpatial, + TelemetryVendorEnum +} from '../../repositories/telemetry-repositories/telemetry-vendor-repository.interface'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { getMockDBConnection } from '../../__mocks__/db'; 
+import { TelemetryDeploymentService } from './telemetry-deployment-service'; +import { TelemetryVendorService } from './telemetry-vendor-service'; + +chai.use(sinonChai); + +describe('TelemetryVendorService', () => { + beforeEach(() => { + sinon.restore(); + }); + + describe('constructor', () => { + it('should create a new TelemetryVendorService', () => { + const service = new TelemetryVendorService(getMockDBConnection()); + + expect(service.vendorRepository).to.be.an.instanceOf(TelemetryVendorRepository); + expect(service.connection).to.exist; + }); + }); + + describe('getTelemetryForDeployment', () => { + it('should return telemetry data for a single deployment', async () => { + const mockDBConnection = getMockDBConnection(); + const repoStub = sinon.stub(TelemetryVendorRepository.prototype, 'getTelemetryByDeploymentIds').resolves([]); + + const service = new TelemetryVendorService(mockDBConnection); + + const data = await service.getTelemetryForDeployment(1, 1); + + expect(repoStub).to.have.been.calledWith(1, [1], undefined); + expect(data).to.deep.equal([]); + }); + }); + + describe('getTelemetryForDeployments', () => { + it('should return telemetry data for a single deployment', async () => { + const mockDBConnection = getMockDBConnection(); + const repoStub = sinon.stub(TelemetryVendorRepository.prototype, 'getTelemetryByDeploymentIds').resolves([]); + + const service = new TelemetryVendorService(mockDBConnection); + + const data = await service.getTelemetryForDeployments(1, [1, 2]); + + expect(repoStub).to.have.been.calledWith(1, [1, 2], undefined); + expect(data).to.deep.equal([]); + }); + }); + + describe('getTelemetryForCritter', () => { + it('should return telemetry data for a single critter', async () => { + const mockDBConnection = getMockDBConnection(); + const repoStub = sinon.stub(TelemetryVendorRepository.prototype, 'getTelemetryByDeploymentIds').resolves([]); + + const service = new TelemetryVendorService(mockDBConnection); + + const 
deploymentServiceStub = sinon + .stub(service.deploymentService, 'getDeploymentsForCritterId') + .resolves([{ deployment_id: 8 } as any]); + + const data = await service.getTelemetryForCritter(1, 1); + + expect(deploymentServiceStub).to.have.been.calledWith(1, 1); + expect(repoStub).to.have.been.calledWith(1, [8], undefined); + expect(data).to.deep.equal([]); + }); + }); + + describe('getTelemetryForSurvey', () => { + describe('with pagination', () => { + it('should return telemetry data for a survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockTelemetry = [ + { + telemetry_id: '123-456-789', + deployment_id: 8, + critter_id: 3, + vendor: TelemetryVendorEnum.VECTRONIC, + serial: '123456', + acquisition_date: '2021-01-01T00:00:00.000Z', + latitude: -49, + longitude: 125, + elevation: null, + temperature: null + } + ]; + const getTelemetryByDeploymentIdsStub = sinon + .stub(TelemetryVendorRepository.prototype, 'getTelemetryByDeploymentIds') + .resolves(mockTelemetry); + + const mockCount = 1; + const getTelemetryCountByDeploymentIdsStub = sinon + .stub(TelemetryVendorRepository.prototype, 'getTelemetryCountByDeploymentIds') + .resolves(mockCount); + + const service = new TelemetryVendorService(mockDBConnection); + + const deploymentServiceStub = sinon + .stub(service.deploymentService, 'getDeploymentsForSurvey') + .resolves([{ deployment_id: 8 } as any]); + + const surveyId = 1; + const pagination: ApiPaginationOptions = { page: 1, limit: 10 }; + + const data = await service.getTelemetryForSurvey(surveyId, { pagination }); + + expect(deploymentServiceStub).to.have.been.calledWith(surveyId); + expect(getTelemetryByDeploymentIdsStub).to.have.been.calledWith(surveyId, [8], { pagination }); + expect(getTelemetryCountByDeploymentIdsStub).to.have.been.calledWith(surveyId, [8]); + + expect(data).to.deep.equal([mockTelemetry, mockCount]); + }); + }); + + describe('without pagination', () => { + it('should return telemetry data for a 
survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockTelemetry = [ + { + telemetry_id: '123-456-789', + deployment_id: 8, + critter_id: 3, + vendor: TelemetryVendorEnum.VECTRONIC, + serial: '123456', + acquisition_date: '2021-01-01T00:00:00.000Z', + latitude: -49, + longitude: 125, + elevation: null, + temperature: null + } + ]; + const getTelemetryByDeploymentIdsStub = sinon + .stub(TelemetryVendorRepository.prototype, 'getTelemetryByDeploymentIds') + .resolves(mockTelemetry); + + const service = new TelemetryVendorService(mockDBConnection); + + const deploymentServiceStub = sinon + .stub(service.deploymentService, 'getDeploymentsForSurvey') + .resolves([{ deployment_id: 8 } as any]); + + const surveyId = 1; + const pagination = undefined; + + const data = await service.getTelemetryForSurvey(surveyId, pagination); + + expect(deploymentServiceStub).to.have.been.calledWith(surveyId); + expect(getTelemetryByDeploymentIdsStub).to.have.been.calledWith(surveyId, [8], undefined); + expect(data).to.deep.equal([mockTelemetry, 1]); + }); + }); + }); + + describe('getTelemetrySpatialForSurvey', () => { + it('should return telemetry data for a survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockDeployment: ExtendedDeploymentRecord[] = [ + { + deployment_id: 1, + survey_id: 2, + critter_id: 3, + device_id: 4, + device_key: 'lotek:123456', + frequency: 123, + frequency_unit_id: 1, + attachment_start_date: '2021-01-01', + attachment_start_time: '00:00:00', + attachment_start_timestamp: '2021-01-01T00:00:00.000Z', + attachment_end_date: '2021-01-01', + attachment_end_time: '00:00:00', + attachment_end_timestamp: '2021-01-01T00:00:00.000Z', + critterbase_start_capture_id: null, + critterbase_end_capture_id: null, + critterbase_end_mortality_id: null, + serial: '1234', + device_make_id: 1, + model: 'V2', + critterbase_critter_id: '1111111111' + } + ]; + + const mockTelemetry: TelemetrySpatial[] = [ + { + 
telemetry_id: '22222222222', + geometry: { + type: 'Point', + coordinates: [125, -49] + } + } + ]; + + const getDeploymentsForSurveyIdStub = sinon + .stub(TelemetryDeploymentService.prototype, 'getDeploymentsForSurvey') + .resolves(mockDeployment); + + const getTelemetrySpatialByDeploymentIdsStub = sinon + .stub(TelemetryVendorRepository.prototype, 'getTelemetrySpatialByDeploymentIds') + .resolves(mockTelemetry); + + const surveyId = 1; + + const service = new TelemetryVendorService(mockDBConnection); + const data = await service.getTelemetrySpatialForSurvey(surveyId); + + expect(getDeploymentsForSurveyIdStub).to.have.been.calledWith(surveyId); + expect(getTelemetrySpatialByDeploymentIdsStub).to.have.been.calledWith(surveyId, [1]); + + expect(data).to.eql([mockTelemetry, 1]); + }); + }); + + describe('getTelemetryRecordById', () => { + it('should return telemetry data for a survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockTelemetry: Telemetry = { + telemetry_id: '123-456-789', + deployment_id: 8, + critter_id: 3, + vendor: TelemetryVendorEnum.VECTRONIC, + serial: '123456', + acquisition_date: '2021-01-01T00:00:00.000Z', + latitude: -49, + longitude: 125, + elevation: null, + temperature: null + }; + + const getTelemetryRecordByIdStub = sinon + .stub(TelemetryVendorRepository.prototype, 'getTelemetryRecordById') + .resolves(mockTelemetry); + + const surveyId = 1; + const telemetryId = '22222222222'; + + const service = new TelemetryVendorService(mockDBConnection); + const data = await service.getTelemetryRecordById(surveyId, telemetryId); + + expect(getTelemetryRecordByIdStub).to.have.been.calledWith(surveyId, telemetryId); + + expect(data).to.eql(mockTelemetry); + }); + }); + + describe('findTelemetry', () => { + it('should return telemetry data for a survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockTelemetry: Telemetry[] = [ + { + telemetry_id: '123-456-789', + deployment_id: 8, + 
critter_id: 3, + vendor: TelemetryVendorEnum.VECTRONIC, + serial: '123456', + acquisition_date: '2021-01-01T00:00:00.000Z', + latitude: -49, + longitude: 125, + elevation: null, + temperature: null + } + ]; + + const findTelemetryStub = sinon + .stub(TelemetryVendorRepository.prototype, 'findTelemetry') + .resolves(mockTelemetry); + + const isUserAdmin = false; + const systemUserId = null; + const filterFields = { + keyword: 'lotek' + }; + const pagination = { + page: 1, + limit: 10 + }; + + const service = new TelemetryVendorService(mockDBConnection); + const data = await service.findTelemetry(isUserAdmin, systemUserId, filterFields, pagination); + + expect(findTelemetryStub).to.have.been.calledWith(isUserAdmin, systemUserId, filterFields, pagination); + + expect(data).to.eql(mockTelemetry); + }); + }); + + describe('findTelemetryCount', () => { + it('should return telemetry data for a survey', async () => { + const mockDBConnection = getMockDBConnection(); + + const findTelemetryCountStub = sinon.stub(TelemetryVendorRepository.prototype, 'findTelemetryCount').resolves(1); + + const isUserAdmin = false; + const systemUserId = null; + const filterFields = { + keyword: 'lotek' + }; + + const service = new TelemetryVendorService(mockDBConnection); + const data = await service.findTelemetryCount(isUserAdmin, systemUserId, filterFields); + + expect(findTelemetryCountStub).to.have.been.calledWith(isUserAdmin, systemUserId, filterFields); + + expect(data).to.equal(1); + }); + }); + + describe('bulkCreateManualTelemetry', () => { + it('should create manual telemetry records', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + const repoStub = sinon.stub(TelemetryManualRepository.prototype, 'bulkCreateManualTelemetry'); + const validateStub = sinon.stub(service.deploymentService, 'getDeploymentsForSurvey').resolves([true] as any); + + await service.bulkCreateManualTelemetry(1, [ + { + 
deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]); + + expect(validateStub).to.have.been.calledWith(1, [1]); + expect(repoStub).to.have.been.calledWith([ + { + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]); + }); + + it('should throw error when survey missing reference to one or many deployment IDs', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + sinon.stub(service.deploymentService, 'getDeploymentsForSurvey').resolves([]); + + try { + await service.bulkCreateManualTelemetry(1, [ + { + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]); + expect.fail(); + } catch (error: any) { + expect(error.message).to.equal('Failed to create manual telemetry'); + } + }); + }); + + describe('bulkUpdateManualTelemetry', () => { + it('should update manual telemetry records', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + const repoStub = sinon.stub(TelemetryManualRepository.prototype, 'bulkUpdateManualTelemetry'); + const validateStub = sinon.stub(service.manualRepository, 'getManualTelemetryByIds').resolves([true] as any); + + await service.bulkUpdateManualTelemetry(1, [ + { + telemetry_manual_id: '09556e24-153b-4dbb-add6-f00e74131e48', + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]); + + expect(validateStub).to.have.been.calledWith(1, ['09556e24-153b-4dbb-add6-f00e74131e48']); + expect(repoStub).to.have.been.calledWith([ + { + telemetry_manual_id: '09556e24-153b-4dbb-add6-f00e74131e48', + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } 
+ ]); + }); + + it('should throw error when survey missing reference to one or many telemetry manual IDs', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + sinon.stub(service.manualRepository, 'getManualTelemetryByIds').resolves([]); + + try { + await service.bulkUpdateManualTelemetry(1, [ + { + telemetry_manual_id: '09556e24-153b-4dbb-add6-f00e74131e48', + deployment_id: 1, + latitude: 1, + longitude: 1, + acquisition_date: '2021-01-01', + transmission_date: '2021-01-01' + } + ]); + expect.fail(); + } catch (error: any) { + expect(error.message).to.equal('Failed to update manual telemetry'); + } + }); + }); + + describe('bulkDeleteManualTelemetry', () => { + it('should update manual telemetry records', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + const repoStub = sinon.stub(TelemetryManualRepository.prototype, 'bulkDeleteManualTelemetry'); + const validateStub = sinon.stub(service.manualRepository, 'getManualTelemetryByIds').resolves([true] as any); + + await service.bulkDeleteManualTelemetry(1, ['09556e24-153b-4dbb-add6-f00e74131e48']); + + expect(validateStub).to.have.been.calledWith(1, ['09556e24-153b-4dbb-add6-f00e74131e48']); + expect(repoStub).to.have.been.calledWith(['09556e24-153b-4dbb-add6-f00e74131e48']); + }); + + it('should throw error when survey missing reference to one or many telemetry manual IDs', async () => { + const mockDBConnection = getMockDBConnection(); + const service = new TelemetryVendorService(mockDBConnection); + + sinon.stub(service.manualRepository, 'getManualTelemetryByIds').resolves([]); + + try { + await service.bulkDeleteManualTelemetry(1, ['09556e24-153b-4dbb-add6-f00e74131e48']); + expect.fail(); + } catch (error: any) { + expect(error.message).to.equal('Failed to delete manual telemetry'); + } + }); + }); +}); diff --git 
a/api/src/services/telemetry-services/telemetry-vendor-service.ts b/api/src/services/telemetry-services/telemetry-vendor-service.ts new file mode 100644 index 0000000000..b046fb2d72 --- /dev/null +++ b/api/src/services/telemetry-services/telemetry-vendor-service.ts @@ -0,0 +1,237 @@ +import { TelemetryManualRecord } from '../../database-models/telemetry_manual'; +import { IDBConnection } from '../../database/db'; +import { ApiGeneralError } from '../../errors/api-error'; +import { IAllTelemetryAdvancedFilters } from '../../models/telemetry-view'; +import { TelemetryManualRepository } from '../../repositories/telemetry-repositories/telemetry-manual-repository'; +import { CreateManualTelemetry } from '../../repositories/telemetry-repositories/telemetry-manual-repository.interface'; +import { TelemetryVendorRepository } from '../../repositories/telemetry-repositories/telemetry-vendor-repository'; +import { + Telemetry, + TelemetryOptions, + TelemetrySpatial +} from '../../repositories/telemetry-repositories/telemetry-vendor-repository.interface'; +import { ApiPaginationOptions } from '../../zod-schema/pagination'; +import { DBService } from '../db-service'; +import { TelemetryDeploymentService } from './telemetry-deployment-service'; + +/** + * A service class for working with telemetry vendor data. + * + * @export + * @class TelemetryVendorService + * @extends {DBService} + */ +export class TelemetryVendorService extends DBService { + vendorRepository: TelemetryVendorRepository; + manualRepository: TelemetryManualRepository; + + deploymentService: TelemetryDeploymentService; + + constructor(connection: IDBConnection) { + super(connection); + + // Telemetry repositories + this.vendorRepository = new TelemetryVendorRepository(connection); + this.manualRepository = new TelemetryManualRepository(connection); + + // Services + this.deploymentService = new TelemetryDeploymentService(connection); + } + + /** + * Get telemetry data for a single deployment. 
+ * + * @async + * @param {number} surveyId + * @param {number} deploymentId + * @param {TelemetryOptions} [options] - Telemetry options + * @returns {Promise} + */ + async getTelemetryForDeployment( + surveyId: number, + deploymentId: number, + options?: TelemetryOptions + ): Promise { + return this.vendorRepository.getTelemetryByDeploymentIds(surveyId, [deploymentId], options); + } + + /** + * Get telemetry data for a list of deployments. + * + * @async + * @param {number} surveyId + * @param {number[]} deploymentIds + * @param {TelemetryOptions} [options] - Telemetry options + * @returns {Promise} + */ + async getTelemetryForDeployments( + surveyId: number, + deploymentIds: number[], + options?: TelemetryOptions + ): Promise { + return this.vendorRepository.getTelemetryByDeploymentIds(surveyId, deploymentIds, options); + } + + /** + * Get telemetry data for a critter. + * + * @async + * @param {number} surveyId + * @param {number} critterId + * @param {TelemetryOptions} [options] - Telemetry options + * @returns {Promise} + */ + async getTelemetryForCritter(surveyId: number, critterId: number, options?: TelemetryOptions): Promise { + const deployments = await this.deploymentService.getDeploymentsForCritterId(surveyId, critterId); + const deploymentIds = deployments.map((deployment) => deployment.deployment_id); + + return this.vendorRepository.getTelemetryByDeploymentIds(surveyId, deploymentIds, options); + } + + /** + * Get paginated telemetry data for a survey. 
+ * + * @async + * @param {number} surveyId + * @param {TelemetryOptions} [options] - Telemetry options + * @returns {Promise<[Telemetry[], number]>} Tuple of telemetry data and total count + */ + async getTelemetryForSurvey(surveyId: number, options?: TelemetryOptions): Promise<[Telemetry[], number]> { + const deployments = await this.deploymentService.getDeploymentsForSurvey(surveyId); + const deploymentIds = deployments.map((deployment) => deployment.deployment_id); + + if (!options?.pagination) { + const telemetry = await this.vendorRepository.getTelemetryByDeploymentIds(surveyId, deploymentIds, options); + return [telemetry, telemetry.length]; + } + + return Promise.all([ + this.vendorRepository.getTelemetryByDeploymentIds(surveyId, deploymentIds, options), + this.vendorRepository.getTelemetryCountByDeploymentIds(surveyId, deploymentIds) + ]); + } + + /** + * Get telemetry spatial data for a survey. + * + * @async + * @param {number} surveyId + * @return {Promise<[TelemetrySpatial[], number]>} - A tuple containing the telemetry spatial data and the total count + */ + async getTelemetrySpatialForSurvey(surveyId: number): Promise<[TelemetrySpatial[], number]> { + const deployments = await this.deploymentService.getDeploymentsForSurvey(surveyId); + const deploymentIds = deployments.map((deployment) => deployment.deployment_id); + + const telemetry = await this.vendorRepository.getTelemetrySpatialByDeploymentIds(surveyId, deploymentIds); + return [telemetry, telemetry.length]; + } + + async getTelemetryRecordById(surveyId: number, telemetryId: string): Promise { + return this.vendorRepository.getTelemetryRecordById(surveyId, telemetryId); + } + + /** + * Retrieves the paginated list of all telemetry records that are available to the user, based on their permissions + * and provided filter criteria. 
+ * + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @param {ApiPaginationOptions} [pagination] + * @return {*} {Promise} + * @memberof TelemetryVendorService + */ + async findTelemetry( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters, + pagination?: ApiPaginationOptions + ): Promise { + return this.vendorRepository.findTelemetry(isUserAdmin, systemUserId, filterFields, pagination); + } + + /** + * Retrieves the count of all telemetry records that are available to the user, based on their permissions and + * provided filter criteria. + * + * @param {boolean} isUserAdmin + * @param {(number | null)} systemUserId + * @param {IAllTelemetryAdvancedFilters} filterFields + * @return {*} {Promise} + * @memberof TelemetryVendorService + */ + async findTelemetryCount( + isUserAdmin: boolean, + systemUserId: number | null, + filterFields: IAllTelemetryAdvancedFilters + ): Promise { + return this.vendorRepository.findTelemetryCount(isUserAdmin, systemUserId, filterFields); + } + + /** + * Create manual telemetry records. + * + * @async + * @param {number} surveyId + * @param {CreateManualTelemetry[]} telemetry - List of manual telemetry data to create + * @returns {Promise} + */ + async bulkCreateManualTelemetry(surveyId: number, telemetry: CreateManualTelemetry[]): Promise { + const deploymentIds = [...new Set(telemetry.map((record) => record.deployment_id))]; + const deployments = await this.deploymentService.getDeploymentsForSurvey(surveyId, deploymentIds); + + if (deployments.length !== deploymentIds.length) { + throw new ApiGeneralError('Failed to create manual telemetry', [ + 'TelemetryVendorService->bulkCreateManualTelemetry', + 'survey missing reference to one or more deployment IDs' + ]); + } + + return this.manualRepository.bulkCreateManualTelemetry(telemetry); + } + + /** + * Update manual telemetry records. 
+ * + * Note: Since this is a bulk update request, the payload must include all the properties to PUT. + * + * @async + * @param {number} surveyId + * @param {TelemetryManualRecord[]} telemetry - List of manual telemetry data to update + * @returns {Promise} + */ + async bulkUpdateManualTelemetry(surveyId: number, telemetry: TelemetryManualRecord[]): Promise { + const telemetryManualIds = telemetry.map((record) => record.telemetry_manual_id); + const manualTelemetry = await this.manualRepository.getManualTelemetryByIds(surveyId, telemetryManualIds); + + if (manualTelemetry.length !== telemetry.length) { + throw new ApiGeneralError('Failed to update manual telemetry', [ + 'TelemetryVendorService->bulkUpdateManualTelemetry', + 'survey missing reference to one or more telemetry manual IDs' + ]); + } + + return this.manualRepository.bulkUpdateManualTelemetry(telemetry); + } + + /** + * Delete manual telemetry records. + * + * @async + * @param {number} surveyId + * @param {string[]} telemetryManualIds - List of manual telemetry IDs + * @returns {Promise} + */ + async bulkDeleteManualTelemetry(surveyId: number, telemetryManualIds: string[]): Promise { + const manualTelemetry = await this.manualRepository.getManualTelemetryByIds(surveyId, telemetryManualIds); + + if (manualTelemetry.length !== telemetryManualIds.length) { + throw new ApiGeneralError('Failed to delete manual telemetry', [ + 'TelemetryVendorService->bulkDeleteManualTelemetry', + 'survey missing reference to one or more telemetry manual IDs' + ]); + } + + return this.manualRepository.bulkDeleteManualTelemetry(telemetryManualIds); + } +} diff --git a/api/src/services/telemetry-services/telemetry.interface.ts b/api/src/services/telemetry-services/telemetry.interface.ts new file mode 100644 index 0000000000..2c31166b7f --- /dev/null +++ b/api/src/services/telemetry-services/telemetry.interface.ts @@ -0,0 +1,36 @@ +export interface TelemetryProcessingOptions { + /** + * The number of telemetry records to create 
in a single batch. + * @type {number} + */ + batchSize: number; + /** + * The number of processes to run concurrently. + * Note: This will include fetching telemetry data and creating telemetry records. + * @type {number} + */ + concurrently: number; + /** + * The start date for fetching telemetry data. + * @type {string | undefined} - ISO 8601 date string + */ + startDate?: string; + /** + * The end date for fetching telemetry data. + * @type {string | undefined} - ISO 8601 date string + */ + endDate?: string; +} + +export interface TelemetryProcessingResult { + /** + * The difference between Lotek telemetry count and SIMS telemetry count. + * @type {number} + */ + new: number; + /** + * The number of telemetry records created in SIMS. + * @type {number} + */ + created: number; +} diff --git a/api/src/types/prettify.d.ts b/api/src/types/prettify.d.ts new file mode 100644 index 0000000000..e23dc7b1d9 --- /dev/null +++ b/api/src/types/prettify.d.ts @@ -0,0 +1,13 @@ +/** + * Prettify a complex type's hover documentation. + * + * Note: Does not change the underlying type. Only provides more readable hover documentation. 
+ * + * @example + * Before: PersonAddress = Person & Address; + * After: PersonAddress = { name: string; street: string; } + * + * @see https://timdeschryver.dev/bits/pretty-typescript-types + */ +// eslint-disable-next-line @typescript-eslint/ban-types +type Prettify = { [K in keyof T]: T[K] } & {}; diff --git a/api/src/utils/env-config.ts b/api/src/utils/env-config.ts index 913afeedec..6e34229a49 100644 --- a/api/src/utils/env-config.ts +++ b/api/src/utils/env-config.ts @@ -56,6 +56,10 @@ export const EnvSchema = z.object({ // External Services CB_API_HOST: ZodEnvString, APP_HOST: ZodEnvString, + LOTEK_API_HOST: ZodEnvString, + LOTEK_ACCOUNT_USERNAME: ZodEnvString, + LOTEK_ACCOUNT_PASSWORD: ZodEnvString, + VECTRONIC_API_HOST: ZodEnvString, // Biohub BACKBONE_INTERNAL_API_HOST: ZodEnvString, @@ -96,12 +100,12 @@ type Env = z.infer; * * @returns {*} {Env} Validated environment variables */ -export const loadEvironmentVariables = (): Env => { +export const loadEnvironmentVariables = (): Env => { const parsed = EnvSchema.safeParse(process.env); if (!parsed.success) { defaultLog.error({ - label: 'loadEvironmentVariables', + label: 'loadEnvironmentVariables', message: 'Environment variables validation check failed', errors: parsed.error.flatten().fieldErrors }); @@ -112,6 +116,19 @@ export const loadEvironmentVariables = (): Env => { return parsed.data; }; +/** + * Get an environment variable by name. + * + * Tests can mock this function to return a specific value to prevent direct access to process.env. 
+ * + * @template EnvKey + * @param {EnvKey} envVariable The environment variable to get + * @returns {*} {Env[EnvKey]} The environment variable value + */ +export const getEnvironmentVariable = (envVariable: EnvKey): Env[EnvKey] => { + return process.env[envVariable] as Env[EnvKey]; +}; + // Extend NodeJS ProcessEnv to include the EnvSchema declare global { // eslint-disable-next-line @typescript-eslint/no-namespace diff --git a/api/src/utils/string-utils.ts b/api/src/utils/string-utils.ts index 94f6f31dd4..94d4136d56 100644 --- a/api/src/utils/string-utils.ts +++ b/api/src/utils/string-utils.ts @@ -35,3 +35,20 @@ export function safeTrim(value: T): T { return value; } + +/** + * Given a string: + * - If the string is empty, null, or undefined, then null will be returned. + * - Otherwise, the string will be converted to a number. + * + * @export + * @param {(string | null | undefined)} value + * @return {*} {(number | null)} + */ +export function numberOrNull(value: string | null | undefined): number | null { + if (value === null || value === undefined || value === '') { + return null; + } + + return Number(value); +} diff --git a/api/src/utils/task-queue.test.ts b/api/src/utils/task-queue.test.ts new file mode 100644 index 0000000000..4caffe41e6 --- /dev/null +++ b/api/src/utils/task-queue.test.ts @@ -0,0 +1,60 @@ +import { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import { taskQueue } from './task-queue'; + +describe('taskQueue', () => { + it('should process tasks and return results', async () => { + const asyncWorker = sinon.stub(); + + asyncWorker.onCall(0).resolves(1); + asyncWorker.onCall(1).resolves(2); + + const results = await taskQueue([1, 2], asyncWorker, 2); + + expect(asyncWorker.getCall(0)).to.have.been.calledWith(1); + expect(asyncWorker.getCall(1)).to.have.been.calledWith(2); + + expect(results).to.deep.equal([ + { task: 1, value: 1 }, + { task: 2, value: 2 } + ]); + }); + + it('should process tasks and 
return errors', async () => { + const asyncWorker = sinon.stub(); + + asyncWorker.onCall(0).resolves(1); + asyncWorker.onCall(1).rejects(new Error('Test Error')); + + const results = await taskQueue([1, 2], asyncWorker, 2); + + expect(asyncWorker.getCall(0)).to.have.been.calledWith(1); + expect(asyncWorker.getCall(1)).to.have.been.calledWith(2); + + expect(results).to.deep.equal([ + { task: 1, value: 1 }, + { task: 2, error: new Error('Test Error') } + ]); + }); + + it('should process tasks concurrently and complete in less than 200ms', async () => { + const asyncWorker = sinon.stub(); + + asyncWorker.onCall(0).callsFake(async () => { + await new Promise((resolve) => setTimeout(resolve, 100)); + return 1; + }); + + asyncWorker.onCall(1).callsFake(async () => { + await new Promise((resolve) => setTimeout(resolve, 100)); + return 2; + }); + + const start = performance.now(); + await taskQueue([1, 2], asyncWorker, 2); + const elapsed = performance.now() - start; + + expect(elapsed).to.be.lessThan(200); + }); +}); diff --git a/api/src/utils/task-queue.ts b/api/src/utils/task-queue.ts new file mode 100644 index 0000000000..b534f4fe1c --- /dev/null +++ b/api/src/utils/task-queue.ts @@ -0,0 +1,64 @@ +import fastq, { asyncWorker } from 'fastq'; +import { getLogger } from './logger'; + +const defaultLog = getLogger('TaskQueue'); + +export type QueueResult = + | { + task: TaskType; + value: WorkerResultType; + error?: never; + } + | { + task: TaskType; + value?: never; + error: Error; + }; + +/** + * Process a list of tasks by queueing them and resolving them in parallel concurrently. 
+ * + * Conceptually equivalent to: + * @example + * const taskArrays = [[promiseA, promiseB], [promiseC, promiseD]]; + * for (const tasks of taskArrays) { + * const results = Promise.all(tasks) + * } + * + * @template TaskType - The type of the tasks to process via `asyncWorker` + * @template WorkerResultType - The type of the resolved value from `asyncWorker` + * @param {TaskType[]} tasks - The tasks to process + * @param {asyncWorker} asyncWorker - The worker function that processes each task + * @param {number} concurrently - The number of tasks to process concurrently + * @returns {Promise[]>} + */ +export const taskQueue = async ( + tasks: TaskType[], + asyncWorker: asyncWorker, + concurrently: number +): Promise[]> => { + const start = performance.now(); + const results: QueueResult[] = []; // The resolved values are pushed into this array + + const queue = fastq.promise(asyncWorker, concurrently); + + // 1. Queue the tasks + for (const task of tasks) { + // 2. Push each task into the queue and handle the resolved value or error + queue + .push(task) + .then((value) => results.push({ task, value })) + .catch((error) => results.push({ task, error })); // Catch errors and push into results + } + + // 4. Wait for the queue to drain (all tasks to complete) + // WARNING: Use `queue.drainED()` not `queue.drain()`. + // The latter will not wait for the tasks to complete. 
+ await queue.drained(); + + defaultLog.info({ + message: `Completed ${tasks.length} tasks in ${((performance.now() - start) / 1000).toFixed(3)}s.` + }); + + return results; +}; diff --git a/api/src/utils/xlsx-utils/worksheet-utils.ts b/api/src/utils/xlsx-utils/worksheet-utils.ts index 38812641ae..02b5e107df 100644 --- a/api/src/utils/xlsx-utils/worksheet-utils.ts +++ b/api/src/utils/xlsx-utils/worksheet-utils.ts @@ -23,7 +23,7 @@ export interface IXLSXCSVColumn { * * time: HH:mm:ss */ - type: 'string' | 'number' | 'date' | 'code'; + type: 'string' | 'number' | 'date' | 'code' | 'stringOrNumber'; /** * Allowed aliases / mappings for column headers. * @@ -268,6 +268,10 @@ export const validateWorksheetColumnTypes = ( validated = true; } + if (columnSpec.type === 'stringOrNumber') { + validated = type === 'string' || type === 'number'; + } + // Undefined values only allowed if column spec is set to optional if (isUndefined(value)) { validated = Boolean(columnSpec.optional); diff --git a/api/tsconfig.json b/api/tsconfig.json index 605eec1ca6..475df5b4f0 100644 --- a/api/tsconfig.json +++ b/api/tsconfig.json @@ -20,6 +20,7 @@ "resolveJsonModule": true, "isolatedModules": true, "noFallthroughCasesInSwitch": true, + "noErrorTruncation": true, "strict": true, "typeRoots": ["node_modules/@types", "src/types"] }, diff --git a/app/src/components/buttons/BreadcrumbNavButton.tsx b/app/src/components/buttons/BreadcrumbNavButton.tsx index ba5df84178..5a65ecbaf0 100644 --- a/app/src/components/buttons/BreadcrumbNavButton.tsx +++ b/app/src/components/buttons/BreadcrumbNavButton.tsx @@ -45,7 +45,7 @@ export const BreadcrumbNavButton = (props: PropsWithChildren { handleMenuClose(); }}> - {item.icon && } + {item.icon ? 
: null} {item.label} ))} diff --git a/app/src/components/data-grid/autocomplete/AutocompleteDataGridEditCell.tsx b/app/src/components/data-grid/autocomplete/AutocompleteDataGridEditCell.tsx index 657c015fd7..8213658000 100644 --- a/app/src/components/data-grid/autocomplete/AutocompleteDataGridEditCell.tsx +++ b/app/src/components/data-grid/autocomplete/AutocompleteDataGridEditCell.tsx @@ -126,7 +126,7 @@ const AutocompleteDataGridEditCell = { return ( - + {renderOption.label} ); diff --git a/app/src/components/dialog/ErrorDialog.tsx b/app/src/components/dialog/ErrorDialog.tsx index 4484801de0..80832c2f32 100644 --- a/app/src/components/dialog/ErrorDialog.tsx +++ b/app/src/components/dialog/ErrorDialog.tsx @@ -10,7 +10,7 @@ import DialogTitle from '@mui/material/DialogTitle'; import Link from '@mui/material/Link'; import List from '@mui/material/List'; import Stack from '@mui/material/Stack'; -import React from 'react'; +import React, { useEffect } from 'react'; export interface IErrorDialogProps { /** @@ -82,6 +82,13 @@ export const ErrorDialog = (props: IErrorDialogProps) => { return {items}; }; + useEffect(() => { + if (isExpanded && !props.open) { + // If the detailed error section was open, and the dialog was closed, close the detailed error section + setIsExpanded(false); + } + }, [isExpanded, props.open]); + if (!props.open) { return <>; } diff --git a/app/src/components/fields/AnimalAutocompleteField.tsx b/app/src/components/fields/AnimalAutocompleteField.tsx index 755212e5c9..1ffbcb207a 100644 --- a/app/src/components/fields/AnimalAutocompleteField.tsx +++ b/app/src/components/fields/AnimalAutocompleteField.tsx @@ -1,4 +1,4 @@ -import Autocomplete from '@mui/material/Autocomplete'; +import Autocomplete, { createFilterOptions } from '@mui/material/Autocomplete'; import Box from '@mui/material/Box'; import CircularProgress from '@mui/material/CircularProgress'; import grey from '@mui/material/colors/grey'; @@ -10,7 +10,7 @@ import { useFormikContext } from 
'formik'; import { useSurveyContext } from 'hooks/useContext'; import { ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; import { get } from 'lodash-es'; -import { useState } from 'react'; +import { useEffect, useState } from 'react'; export interface IAnimalAutocompleteFieldProps { /** @@ -93,6 +93,15 @@ export const AnimalAutocompleteField = (props: IAnima // The input field value const [inputValue, setInputValue] = useState(defaultAnimal?.animal_id ?? ''); + useEffect(() => { + if (!defaultAnimal) { + return; + } + + // Set the input value to the default animal's animal_id + setInputValue(String(defaultAnimal.animal_id)); + }, [defaultAnimal]); + // Survey animals to choose from const options = surveyContext.critterDataLoader.data; @@ -108,7 +117,7 @@ export const AnimalAutocompleteField = (props: IAnima isOptionEqualToValue={(option, value) => { return option.critter_id === value.critter_id; }} - filterOptions={(item) => item} + filterOptions={createFilterOptions()} inputValue={inputValue} onInputChange={(_, _value, reason) => { if (clearOnSelect && reason === 'clear') { @@ -160,7 +169,7 @@ export const AnimalAutocompleteField = (props: IAnima error={get(touched, formikFieldName) && Boolean(get(errors, formikFieldName))} helperText={get(touched, formikFieldName) && get(errors, formikFieldName)} fullWidth - placeholder={placeholder || 'Search for an animal in the Survey'} + placeholder={placeholder ?? 
'Search for an animal in the Survey'} InputProps={{ ...params.InputProps, endAdornment: ( diff --git a/app/src/components/fields/DateField.tsx b/app/src/components/fields/DateField.tsx index fd1a4b5f69..ee1a9b01ea 100644 --- a/app/src/components/fields/DateField.tsx +++ b/app/src/components/fields/DateField.tsx @@ -1,13 +1,13 @@ import { mdiCalendar } from '@mdi/js'; import { Icon } from '@mdi/react'; -import { DatePicker, LocalizationProvider } from '@mui/x-date-pickers'; +import { DatePicker, DatePickerProps, LocalizationProvider } from '@mui/x-date-pickers'; import { AdapterDayjs } from '@mui/x-date-pickers/AdapterDayjs'; import { DATE_FORMAT, DATE_LIMIT } from 'constants/dateTimeFormats'; -import dayjs from 'dayjs'; +import dayjs, { Dayjs } from 'dayjs'; import { useFormikContext } from 'formik'; import { get } from 'lodash-es'; -interface IDateFieldProps { +interface IDateFieldProps extends DatePickerProps { label: string; name: string; id: string; @@ -28,6 +28,7 @@ export const DateField = (props: IDateF return ( }} diff --git a/app/src/components/fields/DeviceAutocompleteField.tsx b/app/src/components/fields/DeviceAutocompleteField.tsx new file mode 100644 index 0000000000..1a6b5d9dfc --- /dev/null +++ b/app/src/components/fields/DeviceAutocompleteField.tsx @@ -0,0 +1,190 @@ +import Autocomplete, { createFilterOptions } from '@mui/material/Autocomplete'; +import Box from '@mui/material/Box'; +import grey from '@mui/material/colors/grey'; +import TextField from '@mui/material/TextField'; +import Typography from '@mui/material/Typography'; +import { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; +import { useFormikContext } from 'formik'; +import { useCodesContext } from 'hooks/useContext'; +import { TelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { get } from 'lodash-es'; +import { useEffect, useState } from 'react'; + +export interface IDeviceAutocompleteFieldProps { + /** + * Formik field name. 
+ * + * @type {string} + * @memberof IDeviceAutocompleteFieldProps + */ + formikFieldName: string; + /** + * The field label. + * + * @type {string} + * @memberof IDeviceAutocompleteFieldProps + */ + label: string; + /** + * The array of options to choose from. + * + * @type {TelemetryDevice[]} + * @memberof IDeviceAutocompleteFieldProps + */ + options: TelemetryDevice[]; + /** + * Callback fired on option selection. + * + * @memberof IDeviceAutocompleteFieldProps + */ + onSelect: (device: TelemetryDevice) => void; + /** + * Optional callback fired on option de-selected/cleared. + * + * @memberof IDeviceAutocompleteFieldProps + */ + onClear?: () => void; + /** + * Default device to render for input and options. + * + * @type {TelemetryDevice} + * @memberof IDeviceAutocompleteFieldProps + */ + defaultDevice?: TelemetryDevice; + /** + * If field is required. + * + * @type {boolean} + * @memberof IDeviceAutocompleteFieldProps + */ + required?: boolean; + /** + * If field is disabled. + * + * @type {boolean} + * @memberof IDeviceAutocompleteFieldProps + */ + disabled?: boolean; + /** + * If `true`, clears the input field after a selection is made. + * + * @memberof IDeviceAutocompleteFieldProps + */ + clearOnSelect?: boolean; + /** + * Placeholder text for the TextField + * + * @type {string} + * @memberof IDeviceAutocompleteFieldProps + */ + placeholder?: string; +} + +/** + * An autocomplete field for selecting an existing device from the Survey. 
+ * + * @template T + * @param {IDeviceAutocompleteFieldProps} props + * @return {*} + */ +export const DeviceAutocompleteField = (props: IDeviceAutocompleteFieldProps) => { + const { formikFieldName, label, options, onSelect, defaultDevice, required, disabled, clearOnSelect, placeholder } = + props; + + const { touched, errors, setFieldValue } = useFormikContext>(); + + const codesContext = useCodesContext(); + + useEffect(() => { + codesContext.codesDataLoader.load(); + }, [codesContext.codesDataLoader]); + + // The input field value + const [inputValue, setInputValue] = useState(String(defaultDevice?.device_id ?? '')); + + useEffect(() => { + if (!defaultDevice) { + return; + } + + // Set the input value to the default device's serial + setInputValue(String(defaultDevice.serial)); + }, [defaultDevice]); + + return ( + option.serial} + filterOptions={createFilterOptions()} + isOptionEqualToValue={(option, value) => { + return option.device_id === value.device_id; + }} + inputValue={inputValue} + onInputChange={(_, _value, reason) => { + if (clearOnSelect && reason === 'clear') { + setFieldValue(formikFieldName, ''); + setInputValue(''); + } + }} + onChange={(_, option) => { + if (option) { + onSelect(option); + setInputValue(String(option.serial)); + } + }} + renderOption={(renderProps, renderOption) => { + return ( + + + + {renderOption.serial}  + + { + codesContext.codesDataLoader.data?.telemetry_device_makes.find( + (make) => make.id === renderOption.device_make_id + )?.name + } + + + + {renderOption.model} + + + + ); + }} + renderInput={(params) => ( + setInputValue(event.currentTarget.value)} + required={required} + sx={{ opacity: props?.disabled ? 0.25 : 1 }} + error={get(touched, formikFieldName) && Boolean(get(errors, formikFieldName))} + helperText={get(touched, formikFieldName) && get(errors, formikFieldName)} + fullWidth + placeholder={placeholder ?? 
'Search for a device in the Survey'} + InputProps={{ + ...params.InputProps, + endAdornment: <>{params.InputProps.endAdornment} + }} + /> + )} + /> + ); +}; diff --git a/app/src/components/fields/HorizontalSplitFormComponent.tsx b/app/src/components/fields/HorizontalSplitFormComponent.tsx index ae898a41f9..fe6f75e6c4 100644 --- a/app/src/components/fields/HorizontalSplitFormComponent.tsx +++ b/app/src/components/fields/HorizontalSplitFormComponent.tsx @@ -17,7 +17,7 @@ export interface IHorizontalSplitFormComponentProps { * @type {string} * @memberof IHorizontalSplitFormComponentProps */ - summary?: string; + summary?: string | ReactElement; /** * The form component to render * @@ -43,7 +43,7 @@ const HorizontalSplitFormComponent = (props: PropsWithChildren {summary && ( - + {summary} diff --git a/app/src/components/fields/MultiAutocompleteField.tsx b/app/src/components/fields/MultiAutocompleteField.tsx index e26c903fc2..56323d10e0 100644 --- a/app/src/components/fields/MultiAutocompleteField.tsx +++ b/app/src/components/fields/MultiAutocompleteField.tsx @@ -178,7 +178,10 @@ const MultiAutocompleteField: React.FC = (props) => { return; } - return tagValue.map((option, index) => ); + return tagValue.map((option, index) => { + const { key, ...tagProps } = getTagProps({ index }); + return ; + }); }} /> ); diff --git a/app/src/components/fields/SystemUserAutocompleteField.tsx b/app/src/components/fields/SystemUserAutocompleteField.tsx index ac02339761..bee5653901 100644 --- a/app/src/components/fields/SystemUserAutocompleteField.tsx +++ b/app/src/components/fields/SystemUserAutocompleteField.tsx @@ -227,7 +227,7 @@ export const SystemUserAutocompleteField = (props: ISystemUserAutocompleteFieldP {...params} variant="outlined" label={label} - placeholder={placeholder || 'Search by user'} + placeholder={placeholder ?? 
'Search by user'} fullWidth InputProps={{ ...params.InputProps, diff --git a/app/src/components/fields/TelemetrySelectField.tsx b/app/src/components/fields/TelemetrySelectField.tsx deleted file mode 100644 index 9defd91488..0000000000 --- a/app/src/components/fields/TelemetrySelectField.tsx +++ /dev/null @@ -1,61 +0,0 @@ -import FormControl, { FormControlProps } from '@mui/material/FormControl'; -import FormHelperText from '@mui/material/FormHelperText'; -import InputLabel from '@mui/material/InputLabel'; -import MenuItem from '@mui/material/MenuItem'; -import Select from '@mui/material/Select'; -import { FormikContextType, useFormikContext } from 'formik'; -import useDataLoader from 'hooks/useDataLoader'; -import get from 'lodash-es/get'; -import React from 'react'; - -interface IAllTelemetrySelectField { - name: string; - label: string; - id: string; - fetchData: () => Promise<(string | number)[]>; - controlProps?: FormControlProps; - handleBlur?: FormikContextType['handleBlur']; - handleChange?: FormikContextType['handleChange']; -} - -interface ISelectOption { - value: string | number; - label: string; -} - -const TelemetrySelectField: React.FC = (props) => { - const bctwLookupLoader = useDataLoader(() => props.fetchData()); - const { values, touched, errors, handleChange, handleBlur } = useFormikContext(); - - const err = get(touched, props.name) && get(errors, props.name); - - if (!bctwLookupLoader.data) { - bctwLookupLoader.load(); - } - - const value = bctwLookupLoader.hasLoaded && get(values, props.name) ? 
get(values, props.name) : ''; - - return ( - - {props.label} - - {err} - - ); -}; - -export default TelemetrySelectField; diff --git a/app/src/components/species/components/SpeciesAutocompleteField.tsx b/app/src/components/species/components/SpeciesAutocompleteField.tsx index 7789b7b81f..b37aaf7e97 100644 --- a/app/src/components/species/components/SpeciesAutocompleteField.tsx +++ b/app/src/components/species/components/SpeciesAutocompleteField.tsx @@ -295,7 +295,7 @@ const SpeciesAutocompleteField = (props: ISpeciesAutocompleteFieldProps) => { label={label} variant="outlined" fullWidth - placeholder={placeholder || 'Enter a species or taxon'} + placeholder={placeholder ?? 'Enter a species or taxon'} InputProps={{ ...params.InputProps, startAdornment: showStartAdornment && ( diff --git a/app/src/constants/errors.ts b/app/src/constants/errors.ts new file mode 100644 index 0000000000..496b06be2f --- /dev/null +++ b/app/src/constants/errors.ts @@ -0,0 +1,25 @@ +/** + * This is a substring of the database's foreign key constraint error message, used to catch + * foreign key constraint errors when trying to delete a record and displaying a more useful error message + * + * ie. While trying to delete a device: + * + * if (error.includes(FOREIGN_KEY_CONSTRAINT_ERROR)) { + * return "Delete the associated deployment before deleting the device" + * } + * + */ +export const FOREIGN_KEY_CONSTRAINT_ERROR = 'foreign key constraint'; + +/** + * This is a substring of the database's unique constraint error message, used to catch + * unique constraint errors when trying to insert a record + * + * ie. 
While trying to create a device: + * + * if (error.includes(UNIQUE_CONSTRAINT_ERROR)) { + * return "That device already eixsts in the Survey" + * } + * + */ +export const UNIQUE_CONSTRAINT_ERROR = 'already exists'; diff --git a/app/src/constants/i18n.ts b/app/src/constants/i18n.ts index 6ee3b34128..b7bc931c0b 100644 --- a/app/src/constants/i18n.ts +++ b/app/src/constants/i18n.ts @@ -477,6 +477,17 @@ export const TelemetryDeviceKeyFileI18N = { 'An error has occurred while attempting to download the device key file, please try again. If the error persists, please contact your system administrator.' }; +export const TelemetryDeviceI18N = { + cancelTitle: 'Discard changes and exit?', + cancelText: 'Any changes you have made will not be saved. Do you want to proceed?', + createErrorTitle: 'Error Creating Device', + createErrorText: + 'An error has occurred while attempting to create your device. Please try again. If the error persists, please contact your system administrator.', + editErrorTitle: 'Error Editing Device', + editErrorText: + 'An error has occurred while attempting to edit your device. Please try again. If the error persists, please contact your system administrator.' +}; + export const CreateAnimalDeploymentI18N = { cancelTitle: 'Discard changes and exit?', cancelText: 'Any changes you have made will not be saved. Do you want to proceed?', @@ -490,7 +501,7 @@ export const EditAnimalDeploymentI18N = { cancelText: 'Any changes you have made will not be saved. Do you want to proceed?', createErrorTitle: 'Error Creating Deployment', createErrorText: - 'An error has occurred while attempting to create your deployment. Please try again. If the error persists, please contact your system administrator.' + 'An error has occurred while attempting to edit your deployment. Please try again. If the error persists, please contact your system administrator.' 
}; export const SurveyExportI18N = { diff --git a/app/src/contexts/animalPageContext.tsx b/app/src/contexts/animalPageContext.tsx index eb7048289e..45716a5852 100644 --- a/app/src/contexts/animalPageContext.tsx +++ b/app/src/contexts/animalPageContext.tsx @@ -53,7 +53,7 @@ export const AnimalPageContextProvider = (props: PropsWithChildren - biohubApi.survey.getCritterById(projectId, surveyId, critterId) + biohubApi.survey.getCritterById(projectId, surveyId, critterId, ['attachments']) ); // The currently selected animal diff --git a/app/src/contexts/observationsContext.tsx b/app/src/contexts/observationsContext.tsx index a5d92d4d86..2d469b74a2 100644 --- a/app/src/contexts/observationsContext.tsx +++ b/app/src/contexts/observationsContext.tsx @@ -1,7 +1,6 @@ import { useBiohubApi } from 'hooks/useBioHubApi'; import useDataLoader, { DataLoader } from 'hooks/useDataLoader'; import { IGetSurveyObservationsResponse } from 'interfaces/useObservationApi.interface'; -import { IPartialTaxonomy } from 'interfaces/useTaxonomyApi.interface'; import { createContext, PropsWithChildren, useContext } from 'react'; import { ApiPaginationRequestOptions } from 'types/misc'; import { SurveyContext } from './surveyContext'; @@ -21,10 +20,6 @@ export type IObservationsContext = { IGetSurveyObservationsResponse, unknown >; - /** - * Data Loader used for retrieving species observed in a survey - */ - observedSpeciesDataLoader: DataLoader<[], IPartialTaxonomy[], unknown>; }; export const ObservationsContext = createContext(undefined); @@ -38,11 +33,8 @@ export const ObservationsContextProvider = (props: PropsWithChildren biohubApi.observation.getObservedSpecies(projectId, surveyId)); - const observationsContext: IObservationsContext = { - observationsDataLoader, - observedSpeciesDataLoader + observationsDataLoader }; return {props.children}; diff --git a/app/src/contexts/telemetryDataContext.tsx b/app/src/contexts/telemetryDataContext.tsx deleted file mode 100644 index 
86a0f70635..0000000000 --- a/app/src/contexts/telemetryDataContext.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import { useBiohubApi } from 'hooks/useBioHubApi'; -import useDataLoader, { DataLoader } from 'hooks/useDataLoader'; -import { WarningSchema } from 'interfaces/useBioHubApi.interface'; -import { IAllTelemetry, IAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; -import { createContext, PropsWithChildren, useMemo } from 'react'; - -/** - * Context object that stores information about a survey - * - * @export - * @interface ITelemetryDataContext - */ -export interface ITelemetryDataContext { - /** - * The Data Loader used to load deployments. - * - * @type {DataLoader<[project_id: number, survey_id: number], { deployments: IAnimalDeployment[]; bad_deployments: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }>[] }, unknown>} - * @memberof ITelemetryDataContext - */ - deploymentsDataLoader: DataLoader< - [project_id: number, survey_id: number], - { - deployments: IAnimalDeployment[]; - bad_deployments: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }>[]; - }, - unknown - >; - /** - * The Data Loader used to load telemetry. 
- * - * @type {DataLoader<[deploymentIds: string[]], IAllTelemetry[], unknown>} - * @memberof ITelemetryDataContext - */ - telemetryDataLoader: DataLoader<[deploymentIds: string[]], IAllTelemetry[], unknown>; -} - -export const TelemetryDataContext = createContext(undefined); - -export const TelemetryDataContextProvider = (props: PropsWithChildren>) => { - const biohubApi = useBiohubApi(); - - const deploymentsDataLoader = useDataLoader(biohubApi.survey.getDeploymentsInSurvey); - const telemetryDataLoader = useDataLoader(biohubApi.telemetry.getAllTelemetryByDeploymentIds); - - const telemetryDataContext: ITelemetryDataContext = useMemo(() => { - return { - deploymentsDataLoader, - telemetryDataLoader - }; - }, [deploymentsDataLoader, telemetryDataLoader]); - - return {props.children}; -}; diff --git a/app/src/contexts/telemetryTableContext.tsx b/app/src/contexts/telemetryTableContext.tsx index 3be0d1c4ff..cc59f6e88d 100644 --- a/app/src/contexts/telemetryTableContext.tsx +++ b/app/src/contexts/telemetryTableContext.tsx @@ -2,31 +2,38 @@ import Typography from '@mui/material/Typography'; import { GridCellParams, GridColumnVisibilityModel, + GridPaginationModel, GridRowId, GridRowModes, GridRowModesModel, GridRowSelectionModel, + GridSortModel, GridValidRowModel, useGridApiRef } from '@mui/x-data-grid'; import { GridApiCommunity, GridStateColDef } from '@mui/x-data-grid/internals'; +import { RowValidationError, TableValidationModel } from 'components/data-grid/DataGridValidationAlert'; import { TelemetryTableI18N } from 'constants/i18n'; import { SIMS_TELEMETRY_HIDDEN_COLUMNS } from 'constants/session-storage'; -import { DialogContext } from 'contexts/dialogContext'; import { default as dayjs } from 'dayjs'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useDialogContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; import { usePersistentState } from 
'hooks/usePersistentState'; -import { IAllTelemetry } from 'interfaces/useTelemetryApi.interface'; -import { createContext, PropsWithChildren, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'; +import { GetSurveyTelemetryResponse } from 'interfaces/useTelemetryApi.interface'; +import { createContext, PropsWithChildren, useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { ApiPaginationRequestOptions } from 'types/misc'; +import { firstOrNull } from 'utils/Utils'; import { v4 as uuidv4 } from 'uuid'; -import { RowValidationError, TableValidationModel } from '../components/data-grid/DataGridValidationAlert'; + +export const MANUAL_TELEMETRY_TYPE = 'manual'; export interface IManualTelemetryRecord { - deployment_id: string; - device_id: string; - latitude: number; - longitude: number; + deployment_id: number; + serial: string; + latitude: number | null; + longitude: number | null; date: string; time: string; telemetry_type: string; @@ -68,9 +75,24 @@ export type IAllTelemetryTableContext = { * Reflects the total count of telemetry records for the survey */ recordCount: number; + /** + * The pagination model, which defines which telemetry records to fetch and load in the table. + */ + paginationModel: GridPaginationModel; + /** + * Sets the pagination model. + */ + setPaginationModel: (model: GridPaginationModel) => void; + /** + * The sort model, which defines how the telemetry records should be sorted. + */ + sortModel: GridSortModel; + /** + * Sets the sort model. 
+ */ + setSortModel: (mode: GridSortModel) => void; /** * Columns hidden from table view - * */ hiddenColumns: string[]; /** @@ -100,7 +122,7 @@ export type IAllTelemetryTableContext = { /** * Refreshes the Telemetry Table with already existing records */ - refreshRecords: () => Promise; + refreshRecords: () => Promise; /** * The IDs of the selected telemetry table rows */ @@ -146,20 +168,28 @@ export type IAllTelemetryTableContext = { export const TelemetryTableContext = createContext(undefined); -type IAllTelemetryTableContextProviderProps = PropsWithChildren<{ - isLoading: boolean; - telemetryData: IAllTelemetry[]; - refreshRecords: () => Promise; -}>; +type IAllTelemetryTableContextProviderProps = PropsWithChildren; export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextProviderProps) => { - const { children, isLoading, telemetryData, refreshRecords } = props; + const { children } = props; const _muiDataGridApiRef = useGridApiRef(); const biohubApi = useBiohubApi(); - const dialogContext = useContext(DialogContext); + const surveyContext = useSurveyContext(); + const dialogContext = useDialogContext(); + + const telemetryDataLoader = useDataLoader((pagination?: ApiPaginationRequestOptions) => + biohubApi.telemetry.getTelemetryForSurvey(surveyContext.projectId, surveyContext.surveyId, pagination) + ); + + const { + data: telemetryData, + isLoading: isLoadingTelemetryData, + hasLoaded: hasLoadedTelemetryData, + refresh: refreshTelemetryData + } = telemetryDataLoader; // The data grid rows const [rows, setRows] = useState([]); @@ -189,7 +219,16 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr const _isSavingData = useRef(false); // Count of table records - const recordCount = rows.length; + const recordCount = telemetryData?.count ?? 
0; + + // Pagination model + const [paginationModel, setPaginationModel] = useState({ + page: 0, + pageSize: 25 + }); + + // Sort model + const [sortModel, setSortModel] = useState([{ field: 'date', sort: 'desc' }]); // True if table has unsaved changes, deferring value to prevent ui issue with controls rendering const hasUnsavedChanges = _modifiedRowIds.current.length > 0 || _stagedRowIds.current.length > 0; @@ -421,7 +460,11 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr try { if (modifiedRowIdsToDelete.length) { - await biohubApi.telemetry.deleteManualTelemetry(modifiedRowIdsToDelete); + await biohubApi.telemetry.deleteManualTelemetry( + surveyContext.projectId, + surveyContext.surveyId, + modifiedRowIdsToDelete + ); } // Remove row IDs from validation model @@ -470,7 +513,7 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr }); } }, - [biohubApi, dialogContext] + [biohubApi.telemetry, dialogContext, surveyContext.projectId, surveyContext.surveyId] ); /** @@ -539,12 +582,12 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr const newRecord: IManualTelemetryTableRow = { id, - deployment_id: '', + deployment_id: '' as unknown as number, latitude: '' as unknown as number, // empty strings to satisfy text fields longitude: '' as unknown as number, date: '', time: '', - telemetry_type: 'MANUAL' + telemetry_type: MANUAL_TELEMETRY_TYPE }; // Append new record to initial rows @@ -577,7 +620,36 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr }, [rows, _updateRowsMode, _modifiedRowIds]); /** - * Dispatches update and create requests to BCTW + * Refreshes the observations table with the latest records from the server. 
+ * + * @return {*} + */ + const refreshTelemetryRecords = useCallback(async () => { + const sort = firstOrNull(sortModel); + + let sortField = sort?.field; + + // Convert frontend column names to the backend column names supported by the api + if (sortField === 'date') { + sortField = 'acquisition_date'; + } else if (sortField === 'time') { + sortField = 'acquisition_time'; + } else if (sortField === 'telemetry_type') { + sortField = 'vendor'; + } + + return refreshTelemetryData({ + limit: paginationModel.pageSize, + sort: sortField || undefined, + order: sort?.sort || undefined, + + // API pagination pages begin at 1, but MUI DataGrid pagination begins at 0. + page: paginationModel.page + 1 + }); + }, [paginationModel.page, paginationModel.pageSize, refreshTelemetryData, sortModel]); + + /** + * Dispatches update and create requests to SIMS * * @param {GridValidRowModel[]} createRows - Rows to create * @param {GridValidRowModel[]} updateRows - Rows to update @@ -588,26 +660,29 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr try { // create a new records const createData = createRows.map((row) => ({ - deployment_id: String(row.deployment_id), + deployment_id: row.deployment_id, latitude: Number(row.latitude), longitude: Number(row.longitude), - acquisition_date: dayjs(`${row.date}T${row.time}`).toISOString() + acquisition_date: dayjs(`${row.date}T${row.time}`).toISOString(), + transmission_date: null })); // update existing records const updateData = updateRows.map((row) => ({ telemetry_manual_id: String(row.id), + deployment_id: row.deployment_id, latitude: Number(row.latitude), longitude: Number(row.longitude), - acquisition_date: dayjs(`${row.date}T${row.time}`).toISOString() + acquisition_date: dayjs(`${row.date}T${row.time}`).toISOString(), + transmission_date: null })); if (createData.length) { - await biohubApi.telemetry.createManualTelemetry(createData); + await 
biohubApi.telemetry.createManualTelemetry(surveyContext.projectId, surveyContext.surveyId, createData); } if (updateData.length) { - await biohubApi.telemetry.updateManualTelemetry(updateData); + await biohubApi.telemetry.updateManualTelemetry(surveyContext.projectId, surveyContext.surveyId, updateData); } revertRecords(); @@ -621,7 +696,7 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr open: true }); - return refreshRecords(); + return refreshTelemetryRecords(); } catch (error) { _updateRowsMode(_modifiedRowIds.current, GridRowModes.Edit, true); const apiError = error as APIError; @@ -637,7 +712,15 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr _isSavingData.current = false; } }, - [dialogContext, _updateRowsMode, _isSavingData, revertRecords, refreshRecords, biohubApi] + [ + _updateRowsMode, + biohubApi.telemetry, + dialogContext, + refreshTelemetryRecords, + revertRecords, + surveyContext.projectId, + surveyContext.surveyId + ] ); /** @@ -671,32 +754,45 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr await _saveRecords(newRows, updateRows); }, [_validateRows, _getEditedIds, _getEditedRows, _saveRecords]); + /** + * Fetch new rows based on sort/ pagination model changes + */ + useEffect(() => { + refreshTelemetryRecords(); + // Should not re-run this effect on `refreshObservationRecords` changes + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [paginationModel, sortModel]); + /** * Parse the telemetry data to the table format and set the rows. 
* */ useEffect(() => { - if (!telemetryData) { - // No telemetry data, clear the table - setRows([]); + if (!hasLoadedTelemetryData) { + // Existing telemetry records have not yet loaded + return; + } + + if (!telemetryData?.telemetry) { + // Existing telemetry data doesn't exist return; } - const rows: IManualTelemetryTableRow[] = telemetryData.map((item) => { + const rows: IManualTelemetryTableRow[] = telemetryData.telemetry.map((item) => { return { - id: item.id, + id: item.telemetry_id, deployment_id: item.deployment_id, - device_id: item.device_id, + serial: item.serial, latitude: item.latitude, longitude: item.longitude, date: dayjs(item.acquisition_date).format('YYYY-MM-DD'), time: dayjs(item.acquisition_date).format('HH:mm:ss'), - telemetry_type: item.telemetry_type + telemetry_type: item.vendor }; }); setRows(rows); - }, [telemetryData]); + }, [hasLoadedTelemetryData, telemetryData]); const telemetryTableContext: IAllTelemetryTableContext = useMemo( () => ({ @@ -710,7 +806,7 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr deleteRecords, deleteSelectedRecords, revertRecords, - refreshRecords, + refreshRecords: refreshTelemetryRecords, hasUnsavedChanges, rowSelectionModel, onRowSelectionModelChange: setRowSelectionModel, @@ -718,36 +814,42 @@ export const TelemetryTableContextProvider = (props: IAllTelemetryTableContextPr onRowModesModelChange: setRowModesModel, columnVisibilityModel, onColumnVisibilityModelChange: setColumnVisibilityModel, - isLoading, + isLoading: isLoadingTelemetryData, isSaving: _isSavingData.current, validationModel, recordCount, + paginationModel, + setPaginationModel, + sortModel, + setSortModel, toggleColumnsVisibility, hiddenColumns, onRowEditStart }), [ _muiDataGridApiRef, - rows, - getColumns, addRecord, - hasError, - saveRecords, + columnVisibilityModel, deleteRecords, deleteSelectedRecords, - revertRecords, - refreshRecords, + getColumns, + hasError, hasUnsavedChanges, - rowSelectionModel, - 
rowModesModel, - isLoading, - validationModel, + hiddenColumns, + isLoadingTelemetryData, + onRowEditStart, + paginationModel, recordCount, - columnVisibilityModel, + refreshTelemetryRecords, + revertRecords, + rowModesModel, + rowSelectionModel, + rows, + saveRecords, setColumnVisibilityModel, + sortModel, toggleColumnsVisibility, - hiddenColumns, - onRowEditStart + validationModel ] ); diff --git a/app/src/features/projects/ProjectsRouter.tsx b/app/src/features/projects/ProjectsRouter.tsx index 671a54c9e2..c12f681f05 100644 --- a/app/src/features/projects/ProjectsRouter.tsx +++ b/app/src/features/projects/ProjectsRouter.tsx @@ -1,7 +1,6 @@ import { ProjectRoleRouteGuard, SystemRoleRouteGuard } from 'components/security/RouteGuards'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from 'constants/roles'; import { DialogContextProvider } from 'contexts/dialogContext'; -import { ObservationsContextProvider } from 'contexts/observationsContext'; import { ProjectAuthStateContextProvider } from 'contexts/projectAuthStateContext'; import { ProjectContextProvider } from 'contexts/projectContext'; import { SurveyContextProvider } from 'contexts/surveyContext'; @@ -93,9 +92,7 @@ const ProjectsRouter: React.FC = () => { ]} validSystemRoles={[SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.DATA_ADMINISTRATOR]}> - - - + diff --git a/app/src/features/summary/list-data/project/ProjectsListContainer.tsx b/app/src/features/summary/list-data/project/ProjectsListContainer.tsx index f213e2ad3d..3d902bd852 100644 --- a/app/src/features/summary/list-data/project/ProjectsListContainer.tsx +++ b/app/src/features/summary/list-data/project/ProjectsListContainer.tsx @@ -50,7 +50,7 @@ interface IProjectsListContainerProps { } // Default pagination parameters -const ApiPaginationRequestOptionsInitialValues: Required = { +const initialPaginationParams: Required = { page: 0, limit: 10, sort: 'project_id', @@ -70,14 +70,14 @@ const ProjectsListContainer = (props: IProjectsListContainerProps) => { const { 
searchParams, setSearchParams } = useSearchParams>(); const [paginationModel, setPaginationModel] = useState({ - pageSize: Number(searchParams.get('p_limit') ?? ApiPaginationRequestOptionsInitialValues.limit), - page: Number(searchParams.get('p_page') ?? ApiPaginationRequestOptionsInitialValues.page) + pageSize: Number(searchParams.get('p_limit') ?? initialPaginationParams.limit), + page: Number(searchParams.get('p_page') ?? initialPaginationParams.page) }); const [sortModel, setSortModel] = useState([ { - field: searchParams.get('p_sort') ?? ApiPaginationRequestOptionsInitialValues.sort, - sort: (searchParams.get('p_order') ?? ApiPaginationRequestOptionsInitialValues.order) as GridSortDirection + field: searchParams.get('p_sort') ?? initialPaginationParams.sort, + sort: (searchParams.get('p_order') ?? initialPaginationParams.order) as GridSortDirection } ]); @@ -212,7 +212,7 @@ const ProjectsListContainer = (props: IProjectsListContainerProps) => { } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -227,7 +227,7 @@ const ProjectsListContainer = (props: IProjectsListContainerProps) => { hasNoDataFallbackDelay={100}> { } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -234,7 +234,7 @@ const SurveysListContainer = (props: ISurveysListContainerProps) => { hasNoDataFallbackDelay={100}> { } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -174,7 +174,7 @@ const AnimalsListContainer = (props: IAnimalsListContainerProps) => { hasNoDataFallbackDelay={100}> = { page: 0, limit: 10, sort: 'survey_observation_id', - order: 'asc' + order: 'desc' }; /** @@ -248,7 +248,7 @@ const ObservationsListContainer = (props: IObservationsListContainerProps) => { } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -263,7 +263,7 @@ const ObservationsListContainer = (props: IObservationsListContainerProps) => { hasNoDataFallbackDelay={100}> , - , - + , + ]} /> )} diff --git a/app/src/features/summary/tabular-data/telemetry/TelemetryListContainer.tsx 
b/app/src/features/summary/tabular-data/telemetry/TelemetryListContainer.tsx index e640b3512c..d41db8f05b 100644 --- a/app/src/features/summary/tabular-data/telemetry/TelemetryListContainer.tsx +++ b/app/src/features/summary/tabular-data/telemetry/TelemetryListContainer.tsx @@ -19,16 +19,21 @@ import { IFindTelementryObj } from 'interfaces/useTelemetryApi.interface'; import { useState } from 'react'; import { ApiPaginationRequestOptions, StringValues } from 'types/misc'; import { firstOrNull } from 'utils/Utils'; -import TelemetryListFilterForm, { +import { IAllTelemetryAdvancedFilters, - TelemetryAdvancedFiltersInitialValues + TelemetryAdvancedFiltersInitialValues, + TelemetryListFilterForm } from './TelemetryListFilterForm'; // Supported URL parameters // Note: Prefix 't_' is used to avoid conflicts with similar query params from other components type TelemetryDataTableURLParams = { // filter + t_keyword?: string; t_itis_tsn?: string; + t_start_date?: string; + t_end_date?: string; + t_system_user_id?: number; // pagination t_page?: string; t_limit?: string; @@ -43,15 +48,15 @@ interface IAllTelemetryListContainerProps { } // Default pagination parameters -const initialPaginationParams: ApiPaginationRequestOptions = { +const initialPaginationParams: Required = { page: 0, limit: 10, - sort: undefined, - order: undefined + sort: 'acquisition_date', + order: 'desc' }; /** - * Displays a list of telemtry. + * Displays a list of telemetry. * * @return {*} */ @@ -75,9 +80,15 @@ const TelemetryListContainer = (props: IAllTelemetryListContainerProps) => { ]); const [advancedFiltersModel, setAdvancedFiltersModel] = useState({ + keyword: searchParams.get('t_keyword') ?? TelemetryAdvancedFiltersInitialValues.keyword, itis_tsn: searchParams.get('t_itis_tsn') ? Number(searchParams.get('t_itis_tsn')) - : TelemetryAdvancedFiltersInitialValues.itis_tsn + : TelemetryAdvancedFiltersInitialValues.itis_tsn, + start_date: searchParams.get('t_start_date') ?? 
TelemetryAdvancedFiltersInitialValues.start_date, + end_date: searchParams.get('t_end_date') ?? TelemetryAdvancedFiltersInitialValues.end_date, + system_user_id: searchParams.get('t_system_user_id') + ? Number(searchParams.get('t_system_user_id')) + : TelemetryAdvancedFiltersInitialValues.system_user_id }); const sort = firstOrNull(sortModel); @@ -117,18 +128,18 @@ const TelemetryListContainer = (props: IAllTelemetryListContainerProps) => { ) }, { - field: 'animal_id', - headerName: 'Nickname', + field: 'device_id', + headerName: 'Device', flex: 1, sortable: false, - renderCell: (params) => {params.row.animal_id} + renderCell: (params) => {params.row.serial} }, { - field: 'device_id', - headerName: 'Device', + field: 'vendor', + headerName: 'Make', flex: 1, sortable: false, - renderCell: (params) => {params.row.device_id} + renderCell: (params) => {params.row.vendor} }, { field: 'acquisition_date', @@ -152,7 +163,14 @@ const TelemetryListContainer = (props: IAllTelemetryListContainerProps) => { { - setSearchParams(searchParams.setOrDelete('t_itis_tsn', values.itis_tsn)); + setSearchParams( + searchParams + .setOrDelete('t_keyword', values.keyword) + .setOrDelete('t_itis_tsn', values.itis_tsn) + .setOrDelete('t_start_date', values.start_date) + .setOrDelete('t_end_date', values.end_date) + .setOrDelete('t_system_user_id', values.system_user_id) + ); setAdvancedFiltersModel(values); }} /> @@ -162,7 +180,7 @@ const TelemetryListContainer = (props: IAllTelemetryListContainerProps) => { } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -177,12 +195,12 @@ const TelemetryListContainer = (props: IAllTelemetryListContainerProps) => { hasNoDataFallbackDelay={100}> row.telemetry_id} // Pagination paginationMode="server" diff --git a/app/src/features/summary/tabular-data/telemetry/TelemetryListFilterForm.tsx b/app/src/features/summary/tabular-data/telemetry/TelemetryListFilterForm.tsx index d16e02529d..3af5b84001 100644 --- 
a/app/src/features/summary/tabular-data/telemetry/TelemetryListFilterForm.tsx +++ b/app/src/features/summary/tabular-data/telemetry/TelemetryListFilterForm.tsx @@ -1,15 +1,26 @@ import CustomTextField from 'components/fields/CustomTextField'; -import SpeciesAutocompleteField from 'components/species/components/SpeciesAutocompleteField'; +import SingleDateField from 'components/fields/SingleDateField'; import { FilterFieldsContainer } from 'features/summary/components/FilterFieldsContainer'; import { Formik } from 'formik'; -import { useTaxonomyContext } from 'hooks/useContext'; export type IAllTelemetryAdvancedFilters = { + keyword?: string; itis_tsn?: number; + start_date?: string; + end_date?: string; + start_time?: string; + end_time?: string; + system_user_id?: number; }; export const TelemetryAdvancedFiltersInitialValues: IAllTelemetryAdvancedFilters = { - itis_tsn: undefined + keyword: undefined, + itis_tsn: undefined, + start_date: undefined, + end_date: undefined, + start_time: undefined, + end_time: undefined, + system_user_id: undefined }; export interface IAllTelemetryListFilterFormProps { @@ -20,56 +31,28 @@ export interface IAllTelemetryListFilterFormProps { /** * Telemetry advanced filters * - * TODO: The filter fields are disabled for now. The fields are functional (the values are captured and passed to the - * backend), but the backend does not currently use them for filtering. 
- * * @param {IAllTelemetryListFilterFormProps} props * @return {*} */ -const TelemetryListFilterForm = (props: IAllTelemetryListFilterFormProps) => { +export const TelemetryListFilterForm = (props: IAllTelemetryListFilterFormProps) => { const { handleSubmit, initialValues } = props; - const taxonomyContext = useTaxonomyContext(); - return ( - {(formikProps) => ( - , - { - if (value?.tsn) { - formikProps.setFieldValue('itis_tsns', value.tsn); - } - }} - handleClear={() => { - formikProps.setFieldValue('itis_tsns', undefined); - }} - disabled={true} // See TODO - key="telemetry-tsn-filter" - /> - ]} - /> - )} + , + , + + ]} + /> ); }; - -export default TelemetryListFilterForm; diff --git a/app/src/features/surveys/SurveyRouter.tsx b/app/src/features/surveys/SurveyRouter.tsx index 6aac3c9c36..ef2e9716e3 100644 --- a/app/src/features/surveys/SurveyRouter.tsx +++ b/app/src/features/surveys/SurveyRouter.tsx @@ -2,7 +2,7 @@ import { ProjectRoleRouteGuard } from 'components/security/RouteGuards'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from 'constants/roles'; import { AnimalPageContextProvider } from 'contexts/animalPageContext'; import { DialogContextProvider } from 'contexts/dialogContext'; -import { TelemetryDataContextProvider } from 'contexts/telemetryDataContext'; +import { ObservationsContextProvider } from 'contexts/observationsContext'; import { AnimalRouter } from 'features/surveys/animals/AnimalRouter'; import EditSurveyPage from 'features/surveys/edit/EditSurveyPage'; import { SurveyObservationPage } from 'features/surveys/observations/SurveyObservationPage'; @@ -46,7 +46,11 @@ const SurveyRouter: React.FC = () => { {/* Animals Routes */} @@ -59,13 +63,13 @@ const SurveyRouter: React.FC = () => { {/* Telemetry Routes */} - - - - - + @@ -75,9 +79,15 @@ const SurveyRouter: React.FC = () => { path="/admin/projects/:id/surveys/:survey_id/observations" title={getTitle('Manage Observations')}> - + + + diff --git 
a/app/src/features/surveys/observations/observations-table/configure-columns/components/measurements/search/MeasurementsSearch.tsx b/app/src/features/surveys/observations/observations-table/configure-columns/components/measurements/search/MeasurementsSearch.tsx index d8c2d8762d..295bb60e38 100644 --- a/app/src/features/surveys/observations/observations-table/configure-columns/components/measurements/search/MeasurementsSearch.tsx +++ b/app/src/features/surveys/observations/observations-table/configure-columns/components/measurements/search/MeasurementsSearch.tsx @@ -2,7 +2,7 @@ import green from '@mui/material/colors/green'; import ColouredRectangleChip from 'components/chips/ColouredRectangleChip'; import { MeasurementsSearchAutocomplete } from 'features/surveys/observations/observations-table/configure-columns/components/measurements/search/MeasurementsSearchAutocomplete'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { useObservationsContext, useSurveyContext } from 'hooks/useContext'; +import { useSurveyContext } from 'hooks/useContext'; import { useCritterbaseApi } from 'hooks/useCritterbaseApi'; import useDataLoader from 'hooks/useDataLoader'; import { CBMeasurementType } from 'interfaces/useCritterApi.interface'; @@ -42,9 +42,12 @@ export const MeasurementsSearch: React.FC = (props) => const critterbaseApi = useCritterbaseApi(); const surveyContext = useSurveyContext(); - const observationsContext = useObservationsContext(); const biohubApi = useBiohubApi(); + const observedSpeciesDataLoader = useDataLoader(() => + biohubApi.observation.getObservedSpecies(surveyContext.projectId, surveyContext.surveyId) + ); + const measurementsDataLoader = useDataLoader((searchTerm: string, tsns?: number[]) => critterbaseApi.xref.getMeasurementTypeDefinitionsBySearchTerm(searchTerm, tsns) ); @@ -52,14 +55,14 @@ export const MeasurementsSearch: React.FC = (props) => const hierarchyDataLoader = useDataLoader((tsns: number[]) => 
biohubApi.taxonomy.getTaxonHierarchyByTSNs(tsns)); useEffect(() => { - if (!observationsContext.observedSpeciesDataLoader.data) { - observationsContext.observedSpeciesDataLoader.load(); + if (!observedSpeciesDataLoader.data) { + observedSpeciesDataLoader.load(); } - }, [observationsContext.observedSpeciesDataLoader]); + }, [observedSpeciesDataLoader]); const focalOrObservedSpecies: number[] = [ ...(surveyContext.surveyDataLoader.data?.surveyData.species.focal_species.map((species) => species.tsn) ?? []), - ...(observationsContext.observedSpeciesDataLoader.data?.map((species) => species.tsn) ?? []) + ...(observedSpeciesDataLoader.data?.map((species) => species.tsn) ?? []) ]; useEffect(() => { diff --git a/app/src/features/surveys/observations/sampling-sites/site/accordion-details/SamplingSiteListContent.tsx b/app/src/features/surveys/observations/sampling-sites/site/accordion-details/SamplingSiteListContent.tsx index 41f48384de..ccbb2aa41f 100644 --- a/app/src/features/surveys/observations/sampling-sites/site/accordion-details/SamplingSiteListContent.tsx +++ b/app/src/features/surveys/observations/sampling-sites/site/accordion-details/SamplingSiteListContent.tsx @@ -50,7 +50,7 @@ export const SamplingSiteListContent = (props: ISamplingSiteListContentProps) => return ( <> {sampleSite.stratums && sampleSite.stratums.length > 0 && ( - + )} diff --git a/app/src/features/surveys/sampling-information/SamplingRouter.tsx b/app/src/features/surveys/sampling-information/SamplingRouter.tsx index dd35c9af94..968504d3e8 100644 --- a/app/src/features/surveys/sampling-information/SamplingRouter.tsx +++ b/app/src/features/surveys/sampling-information/SamplingRouter.tsx @@ -22,18 +22,26 @@ export const SamplingRouter = () => { exact path="/admin/projects/:id/surveys/:survey_id/sampling" title={getTitle('Manage Sampling Information')}> - - - + + + + + - - - + + + + + { exact path="/admin/projects/:id/surveys/:survey_id/sampling/techniques/create" title={getTitle('Create Technique')}> 
- - - + + + + + - - - + + + + + ); diff --git a/app/src/features/surveys/telemetry/TelemetryPage.tsx b/app/src/features/surveys/telemetry/TelemetryPage.tsx index c91b025ff6..4e7202ba62 100644 --- a/app/src/features/surveys/telemetry/TelemetryPage.tsx +++ b/app/src/features/surveys/telemetry/TelemetryPage.tsx @@ -1,57 +1,16 @@ import Box from '@mui/material/Box'; import CircularProgress from '@mui/material/CircularProgress'; import Stack from '@mui/material/Stack'; -import { TelemetryTableContextProvider } from 'contexts/telemetryTableContext'; +import { TelemetryTableContext, TelemetryTableContextProvider } from 'contexts/telemetryTableContext'; import { SurveyDeploymentList } from 'features/surveys/telemetry/list/SurveyDeploymentList'; import { TelemetryTableContainer } from 'features/surveys/telemetry/table/TelemetryTableContainer'; import { TelemetryHeader } from 'features/surveys/telemetry/TelemetryHeader'; -import { useBiohubApi } from 'hooks/useBioHubApi'; import { useProjectContext, useSurveyContext } from 'hooks/useContext'; -import useDataLoader from 'hooks/useDataLoader'; -import { useEffect } from 'react'; export const TelemetryPage = () => { - const biohubApi = useBiohubApi(); - const projectContext = useProjectContext(); const surveyContext = useSurveyContext(); - const deploymentsDataLoader = useDataLoader(biohubApi.survey.getDeploymentsInSurvey); - const telemetryDataLoader = useDataLoader(biohubApi.telemetry.getAllTelemetryByDeploymentIds); - - /** - * Load the deployments and telemetry data when the page is initially loaded. - */ - useEffect(() => { - deploymentsDataLoader.load(surveyContext.projectId, surveyContext.surveyId).then((deployments) => { - const deploymentIds = deployments?.deployments.map((deployment) => deployment.bctw_deployment_id) ?? 
[]; - - if (!deploymentIds.length) { - // No deployments, no telemetry to load - return; - } - - telemetryDataLoader.load(deploymentIds); - }); - }, [deploymentsDataLoader, surveyContext.projectId, surveyContext.surveyId, telemetryDataLoader]); - - /** - * Refresh the data for the telemetry page. - */ - const refreshData = async () => { - deploymentsDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId).then((deployments) => { - const deploymentIds = deployments?.deployments.map((deployment) => deployment.bctw_deployment_id) ?? []; - - if (!deploymentIds.length) { - // No deployments, refresh (clear) the telemetry data - telemetryDataLoader.clearData(); - return; - } - - telemetryDataLoader.refresh(deploymentIds); - }); - }; - if (!surveyContext.surveyDataLoader.data || !projectContext.projectDataLoader.data) { return ; } @@ -75,24 +34,21 @@ export const TelemetryPage = () => { {/* Telematry List */} - { - refreshData(); - }} - /> + {/* Telemetry Component */} - { - refreshData(); - }}> - + + + {(context) => { + if (!context?._muiDataGridApiRef.current) { + // Delay rendering the ObservationsTable until the DataGrid API is available + return ; + } + + return ; + }} + diff --git a/app/src/features/surveys/telemetry/TelemetryRouter.tsx b/app/src/features/surveys/telemetry/TelemetryRouter.tsx index 999be6ba09..44ab0cbaa9 100644 --- a/app/src/features/surveys/telemetry/TelemetryRouter.tsx +++ b/app/src/features/surveys/telemetry/TelemetryRouter.tsx @@ -1,8 +1,11 @@ import { ProjectRoleRouteGuard } from 'components/security/RouteGuards'; import { PROJECT_PERMISSION, SYSTEM_ROLE } from 'constants/roles'; import { DialogContextProvider } from 'contexts/dialogContext'; -import { CreateDeploymentPage } from 'features/surveys/telemetry/deployments/create/CreateDeploymentPage'; -import { EditDeploymentPage } from 'features/surveys/telemetry/deployments/edit/EditDeploymentPage'; +import { CreateDeploymentPage } from 
'features/surveys/telemetry/manage/deployments/create/CreateDeploymentPage'; +import { EditDeploymentPage } from 'features/surveys/telemetry/manage/deployments/edit/EditDeploymentPage'; +import { CreateDevicePage } from 'features/surveys/telemetry/manage/devices/create/CreateDevicePage'; +import { EditDevicePage } from 'features/surveys/telemetry/manage/devices/edit/EditDevicePage'; +import { DevicesAndDeploymentsManagePage } from 'features/surveys/telemetry/manage/DevicesAndDeploymentsManagePage'; import { TelemetryPage } from 'features/surveys/telemetry/TelemetryPage'; import { Redirect, Switch } from 'react-router'; import RouteWithTitle from 'utils/RouteWithTitle'; @@ -37,7 +40,46 @@ export const TelemetryRouter = () => { + + + + + + + + + + + + + + + + + + + + + + + + { { - const { isSubmitting, isEdit } = props; - - const { submitForm, values } = useFormikContext(); - - const surveyContext = useSurveyContext(); - - const biohubApi = useBiohubApi(); - - const history = useHistory(); - - const critterDataLoader = useDataLoader((critterId: number) => - biohubApi.survey.getCritterById(surveyContext.projectId, surveyContext.surveyId, critterId) - ); - - const frequencyUnitDataLoader = useDataLoader(() => biohubApi.telemetry.getCodeValues('frequency_unit')); - const deviceMakesDataLoader = useDataLoader(() => biohubApi.telemetry.getCodeValues('device_make')); - - // Fetch frequency unit and device make code values from BCTW on component mount - useEffect(() => { - frequencyUnitDataLoader.load(); - deviceMakesDataLoader.load(); - }, [deviceMakesDataLoader, frequencyUnitDataLoader]); - - // Fetch critter data when critter_id changes (ie. when the user selects a critter) - useEffect(() => { - if (values.critter_id) { - critterDataLoader.refresh(values.critter_id); - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [values.critter_id]); - - return ( - - - - - ({ label: data.code, value: data.id })) ?? 
[]} - isEdit={isEdit} - /> - - - - - - - - - - - - ({ label: data.code, value: data.id })) ?? []} - /> - - - - - - { - submitForm(); - }}> - Save and Exit - - - - - - - ); -}; diff --git a/app/src/features/surveys/telemetry/deployments/components/form/DeploymentFormHeader.tsx b/app/src/features/surveys/telemetry/deployments/components/form/DeploymentFormHeader.tsx deleted file mode 100644 index b575cefc52..0000000000 --- a/app/src/features/surveys/telemetry/deployments/components/form/DeploymentFormHeader.tsx +++ /dev/null @@ -1,106 +0,0 @@ -import { LoadingButton } from '@mui/lab'; -import Breadcrumbs from '@mui/material/Breadcrumbs'; -import Button from '@mui/material/Button'; -import { grey } from '@mui/material/colors'; -import Container from '@mui/material/Container'; -import Link from '@mui/material/Link'; -import Paper from '@mui/material/Paper'; -import Stack from '@mui/material/Stack'; -import Typography from '@mui/material/Typography'; -import { useFormikContext } from 'formik'; -import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; -import { useHistory } from 'react-router'; -import { Link as RouterLink } from 'react-router-dom'; - -export interface IDeploymentFormHeaderProps { - project_id: number; - project_name: string; - survey_id: number; - survey_name: string; - is_submitting: boolean; - title: string; - breadcrumb: string; -} - -/** - * Renders the header of the create and edit deployment pages. 
- * - * @param {IDeploymentFormHeaderProps} props - * @return {*} - */ -export const DeploymentFormHeader = (props: IDeploymentFormHeaderProps) => { - const history = useHistory(); - const formikProps = useFormikContext(); - - const { project_id, survey_id, survey_name, project_name, is_submitting, title, breadcrumb } = props; - - return ( - <> - - - - - {project_name} - - - {survey_name} - - - Manage Telemetry - - - {breadcrumb} - - - - - {title} - - - { - formikProps.submitForm(); - }}> - Save and Exit - - - - - - - - ); -}; diff --git a/app/src/features/surveys/telemetry/deployments/components/form/deployment-details/DeploymentDetailsForm.tsx b/app/src/features/surveys/telemetry/deployments/components/form/deployment-details/DeploymentDetailsForm.tsx deleted file mode 100644 index 6f5c1ee0ba..0000000000 --- a/app/src/features/surveys/telemetry/deployments/components/form/deployment-details/DeploymentDetailsForm.tsx +++ /dev/null @@ -1,137 +0,0 @@ -import Grid from '@mui/material/Grid'; -import Stack from '@mui/material/Stack'; -import Typography from '@mui/material/Typography'; -import { AnimalAutocompleteField } from 'components/fields/AnimalAutocompleteField'; -import AutocompleteField, { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; -import CustomTextField from 'components/fields/CustomTextField'; -import { useFormikContext } from 'formik'; -import { useSurveyContext } from 'hooks/useContext'; -import { ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; -import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; -import { Link as RouterLink } from 'react-router-dom'; -import { isDefined } from 'utils/Utils'; -import yup from 'utils/YupSchema'; - -export const DeploymentDetailsFormInitialValues: yup.InferType = { - device_id: null as unknown as string, - critter_id: null as unknown as number, - frequency: null, - frequency_unit: null -}; - -export const DeploymentDetailsFormYupSchema = yup.object({ 
- device_id: yup.string().nullable().required('You must enter the device ID. This is typically the serial number'), - critter_id: yup.number().nullable().required('You must select the animal that the device is associated to'), - frequency: yup.lazy(() => - yup - .number() - .nullable() - .when('frequency_unit', { - is: (frequency_unit: number) => isDefined(frequency_unit), // when frequency_unit is defined - then: yup.number().nullable().required('Frequency is required') - }) - ), - frequency_unit: yup.lazy(() => - yup - .number() - .nullable() - .when('frequency', { - is: (frequency: number) => isDefined(frequency), // when frequency is defined - then: yup.number().nullable().required('Frequency unit is required') - }) - ) -}); - -interface IDeploymentDetailsFormProps { - surveyAnimals: ICritterSimpleResponse[]; - frequencyUnits: IAutocompleteFieldOption[]; - isEdit?: boolean; -} - -/** - * Deployment form - deployment details section. - * - * @param {IDeploymentDetailsFormProps} props - * @return {*} - */ -export const DeploymentDetailsForm = (props: IDeploymentDetailsFormProps) => { - const { surveyAnimals, frequencyUnits, isEdit } = props; - - const { setFieldValue, values } = useFormikContext(); - - const surveyContext = useSurveyContext(); - - return ( - <> - - - - You must  - - add the animal - -  to your Survey before associating it to a telemetry device. Add animals via the  - - Manage Animals - -  page. 
- - - - - animal.critter_id === values.critter_id)} - required - clearOnSelect - onSelect={(animal: ICritterSimpleResponse) => { - if (animal) { - setFieldValue('critter_id', animal.critter_id); - } - }} - /> - - - - - - - - - - ); -}; diff --git a/app/src/features/surveys/telemetry/deployments/components/form/device-details/DeploymentDeviceDetailsForm.tsx b/app/src/features/surveys/telemetry/deployments/components/form/device-details/DeploymentDeviceDetailsForm.tsx deleted file mode 100644 index 5e74ea44c4..0000000000 --- a/app/src/features/surveys/telemetry/deployments/components/form/device-details/DeploymentDeviceDetailsForm.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import Grid from '@mui/material/Grid'; -import AutocompleteField, { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; -import CustomTextField from 'components/fields/CustomTextField'; -import yup from 'utils/YupSchema'; - -export const DeploymentDeviceDetailsFormInitialValues: yup.InferType = { - device_make: null as unknown as number, - device_model: null -}; - -export const DeploymentDeviceDetailsFormYupSchema = yup.object({ - device_make: yup.number().nullable().required('You must enter the device make'), - device_model: yup.string().nullable() -}); - -interface IDeploymentDeviceDetailsFormProps { - deviceMakes: IAutocompleteFieldOption[]; -} - -/** - * Deployment form - device details section. 
- * - * @param {IDeploymentDeviceDetailsFormProps} props - * @return {*} - */ -export const DeploymentDeviceDetailsForm = (props: IDeploymentDeviceDetailsFormProps) => { - const { deviceMakes } = props; - - return ( - <> - - - - - - - - - - ); -}; diff --git a/app/src/features/surveys/telemetry/list/SurveyBadDeploymentListItem.tsx b/app/src/features/surveys/telemetry/list/SurveyBadDeploymentListItem.tsx deleted file mode 100644 index 47b0f21dda..0000000000 --- a/app/src/features/surveys/telemetry/list/SurveyBadDeploymentListItem.tsx +++ /dev/null @@ -1,147 +0,0 @@ -import { mdiChevronDown, mdiTrashCanOutline } from '@mdi/js'; -import Icon from '@mdi/react'; -import Accordion from '@mui/material/Accordion'; -import AccordionDetails from '@mui/material/AccordionDetails'; -import AccordionSummary from '@mui/material/AccordionSummary'; -import Box from '@mui/material/Box'; -import Checkbox from '@mui/material/Checkbox'; -import grey from '@mui/material/colors/grey'; -import IconButton from '@mui/material/IconButton'; -import List from '@mui/material/List'; -import Stack from '@mui/material/Stack'; -import Typography from '@mui/material/Typography'; -import { FeatureFlagGuard } from 'components/security/Guards'; -import { WarningSchema } from 'interfaces/useBioHubApi.interface'; - -export interface ISurveyBadDeploymentListItemProps { - data: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }>; - isChecked: boolean; - handleDelete: (deploymentId: number) => void; - handleCheckboxChange: (deploymentId: number) => void; -} - -/** - * Renders a list item for a single bad deployment record. 
- * - * @param {ISurveyBadDeploymentListItemProps {} props - * @return {*} - */ -export const SurveyBadDeploymentListItem = (props: ISurveyBadDeploymentListItemProps) => { - const { data, isChecked, handleDelete, handleCheckboxChange } = props; - - return ( - - - } - aria-controls="panel1bh-content" - sx={{ - flex: '1 1 auto', - py: 0, - pr: 7, - pl: 0, - height: 75, - overflow: 'hidden', - '& .MuiAccordionSummary-content': { - flex: '1 1 auto', - py: 0, - pl: 0, - overflow: 'hidden', - whiteSpace: 'nowrap' - } - }}> - - {/* TODO: This delete is commented out as a temporary bug fix to prevent deployment data from being deleted */} - - }> - { - event.stopPropagation(); - handleCheckboxChange(data.data.sims_deployment_id); - }} - inputProps={{ 'aria-label': 'controlled' }} - /> - - - - - Unknown Deployment - - - - Something went wrong... - - - - - {/* TODO: This delete is commented out as a temporary bug fix to prevent deployment data from being deleted */} - - handleDelete(data.data.sims_deployment_id as number)} - aria-label="deployment-settings"> - - - - - - - - - Deployment {data.data.bctw_deployment_id as string} does not exist. You can remove this - deployment from the Survey. 
- - - - - - ); -}; diff --git a/app/src/features/surveys/telemetry/list/SurveyDeploymentList.tsx b/app/src/features/surveys/telemetry/list/SurveyDeploymentList.tsx index 0ab2c6003e..064f45383a 100644 --- a/app/src/features/surveys/telemetry/list/SurveyDeploymentList.tsx +++ b/app/src/features/surveys/telemetry/list/SurveyDeploymentList.tsx @@ -1,4 +1,4 @@ -import { mdiDotsVertical, mdiPencilOutline, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; +import { mdiCog, mdiDotsVertical, mdiPencilOutline, mdiTrashCanOutline } from '@mdi/js'; import Icon from '@mdi/react'; import Box from '@mui/material/Box'; import Button from '@mui/material/Button'; @@ -15,49 +15,24 @@ import Paper from '@mui/material/Paper'; import Stack from '@mui/material/Stack'; import Toolbar from '@mui/material/Toolbar'; import Typography from '@mui/material/Typography'; -import AlertBar from 'components/alert/AlertBar'; import { LoadingGuard } from 'components/loading/LoadingGuard'; import { SkeletonList } from 'components/loading/SkeletonLoaders'; -import { SurveyBadDeploymentListItem } from 'features/surveys/telemetry/list/SurveyBadDeploymentListItem'; import { SurveyDeploymentListItem } from 'features/surveys/telemetry/list/SurveyDeploymentListItem'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { useDialogContext, useSurveyContext } from 'hooks/useContext'; +import { useCodesContext, useDialogContext, useSurveyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; -import { WarningSchema } from 'interfaces/useBioHubApi.interface'; -import { IAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; import { useEffect, useState } from 'react'; import { Link as RouterLink } from 'react-router-dom'; - -export interface ISurveyDeploymentListProps { - deployments: IAnimalDeployment[]; - badDeployments: WarningSchema<{ - sims_deployment_id: number; - bctw_deployment_id: string; - }>[]; - /** - * Flag to indicate if the deployments are loading. 
- * - * @type {boolean} - * @memberof ISurveyDeploymentListProps - */ - isLoading: boolean; - /** - * Refresh the deployments. - * - * @memberof ISurveyDeploymentListProps - */ - refreshRecords: () => void; -} +import { ApiPaginationRequestOptions } from 'types/misc'; /** * Renders a list of all deployments in the survey * * @returns {*} */ -export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { - const { deployments, badDeployments, isLoading, refreshRecords } = props; - +export const SurveyDeploymentList = () => { const dialogContext = useDialogContext(); + const codesContext = useCodesContext(); const surveyContext = useSurveyContext(); const biohubApi = useBiohubApi(); @@ -68,15 +43,19 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { const [checkboxSelectedIds, setCheckboxSelectedIds] = useState([]); const [selectedDeploymentId, setSelectedDeploymentId] = useState(); - const frequencyUnitDataLoader = useDataLoader(() => biohubApi.telemetry.getCodeValues('frequency_unit')); - const deviceMakesDataLoader = useDataLoader(() => biohubApi.telemetry.getCodeValues('device_make')); + const deploymentDataLoader = useDataLoader((pagination?: ApiPaginationRequestOptions) => + biohubApi.telemetryDeployment.getDeploymentsInSurvey(surveyContext.projectId, surveyContext.surveyId, pagination) + ); - const deploymentCount = (deployments?.length ?? 0) + (badDeployments?.length ?? 0); + const deployments = deploymentDataLoader.data?.deployments ?? []; + const deploymentsCount = deploymentDataLoader.data?.count ?? 0; + /** + * Load the deployments and telemetry data when the page is initially loaded. 
+ */ useEffect(() => { - frequencyUnitDataLoader.load(); - deviceMakesDataLoader.load(); - }, [deviceMakesDataLoader, frequencyUnitDataLoader, surveyContext.projectId, surveyContext.surveyId]); + deploymentDataLoader.load(); + }, [deploymentDataLoader]); const handleBulkActionMenuClick = (event: React.MouseEvent) => { setBulkDeploymentAnchorEl(event.currentTarget); @@ -112,12 +91,12 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { * Callback for when the bulk delete deployment action is confirmed. */ const handleBulkDeleteDeployment = async () => { - await biohubApi.survey + await biohubApi.telemetryDeployment .deleteDeployments(surveyContext.projectId, surveyContext.surveyId, checkboxSelectedIds) .then(() => { dialogContext.setYesNoDialog({ open: false }); setBulkDeploymentAnchorEl(null); - refreshRecords(); + deploymentDataLoader.refresh(); }) .catch((error: any) => { dialogContext.setYesNoDialog({ open: false }); @@ -142,12 +121,12 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { * Callback for when the delete deployment action is confirmed. 
*/ const handleDeleteDeployment = async (deploymentId: number) => { - await biohubApi.survey + await biohubApi.telemetryDeployment .deleteDeployment(surveyContext.projectId, surveyContext.surveyId, deploymentId) .then(() => { dialogContext.setYesNoDialog({ open: false }); setDeploymentAnchorEl(null); - refreshRecords(); + deploymentDataLoader.refresh(); }) .catch((error: any) => { dialogContext.setYesNoDialog({ open: false }); @@ -274,7 +253,7 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { }}> setDeploymentAnchorEl(null)}> @@ -311,7 +290,7 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { Deployments ‌ - ({deploymentCount}) + ({deploymentsCount}) @@ -319,9 +298,9 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { variant="contained" color="primary" component={RouterLink} - to={'deployment/create'} - startIcon={}> - Add + to={'manage'} + startIcon={}> + Manage { } isLoadingFallbackDelay={100} - hasNoData={!deploymentCount} + hasNoData={!deploymentsCount} hasNoDataFallback={ { sx={{ mr: 0.75 }} - checked={checkboxSelectedIds.length > 0 && checkboxSelectedIds.length === deploymentCount} + checked={checkboxSelectedIds.length > 0 && checkboxSelectedIds.length === deploymentsCount} indeterminate={ - checkboxSelectedIds.length >= 1 && checkboxSelectedIds.length < deploymentCount + checkboxSelectedIds.length >= 1 && checkboxSelectedIds.length < deploymentsCount } onClick={() => { - if (checkboxSelectedIds.length === deploymentCount) { + if (checkboxSelectedIds.length === deploymentsCount) { // Unselect all setCheckboxSelectedIds([]); return; @@ -392,10 +374,6 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { // Select all const deploymentIds = deployments.map((deployment) => deployment.deployment_id); - // const badDeploymentIds = badDeployments.map( - // (deployment) => deployment.data.sims_deployment_id - // ); - // TODO: Temporary bug fix - prevent 
bad deployment ids from being selected and deleted setCheckboxSelectedIds([...deploymentIds]); }} inputProps={{ 'aria-label': 'controlled' }} @@ -410,39 +388,18 @@ export const SurveyDeploymentList = (props: ISurveyDeploymentListProps) => { sx={{ background: grey[100] }}> - - {badDeployments.map((badDeployment) => { - return ( - renderDeleteDeploymentDialog(deploymentId)} - handleCheckboxChange={(deploymentId) => handleCheckboxChange(deploymentId)} - /> - ); - })} {deployments.map((deployment) => { const animal = surveyContext.critterDataLoader.data?.find( (animal) => animal.critterbase_critter_id === deployment.critterbase_critter_id ); - if (!animal) { - return null; - } - // Replace the deployment frequency_unit IDs with their human readable codes const hydratedDeployment = { ...deployment, frequency_unit: - frequencyUnitDataLoader.data?.find( - (frequencyUnitOption) => frequencyUnitOption.id === deployment.frequency_unit - )?.code ?? null + codesContext.codesDataLoader.data?.frequency_units.find( + (frequencyUnit) => frequencyUnit.id === deployment.frequency_unit_id + )?.name ?? 
null }; return ( diff --git a/app/src/features/surveys/telemetry/list/SurveyDeploymentListItem.tsx b/app/src/features/surveys/telemetry/list/SurveyDeploymentListItem.tsx index fe139225e9..1b8097bbd3 100644 --- a/app/src/features/surveys/telemetry/list/SurveyDeploymentListItem.tsx +++ b/app/src/features/surveys/telemetry/list/SurveyDeploymentListItem.tsx @@ -15,11 +15,11 @@ import { PulsatingDot } from 'components/misc/PulsatingDot'; import dayjs from 'dayjs'; import { SurveyDeploymentListItemDetails } from 'features/surveys/telemetry/list/SurveyDeploymentListItemDetails'; import { ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; -import { IAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { TelemetryDeployment } from 'interfaces/useTelemetryDeploymentApi.interface'; export interface ISurveyDeploymentListItemProps { - animal: ICritterSimpleResponse; - deployment: Omit & { frequency_unit: string | null }; + animal?: ICritterSimpleResponse; + deployment: Omit & { frequency_unit: string | null }; isChecked: boolean; handleDeploymentMenuClick: (event: React.MouseEvent, deploymentId: number) => void; handleCheckboxChange: (deploymentId: number) => void; @@ -101,14 +101,14 @@ export const SurveyDeploymentListItem = (props: ISurveyDeploymentListItemProps) overflow: 'hidden', textOverflow: 'ellipsis' }}> - {deployment.device_id} + {deployment.serial} {deployment.frequency} {deployment.frequency_unit} - {animal.animal_id} + {`${deployment.deployment_id}: ${animal?.animal_id || 'Unknown'}`} diff --git a/app/src/features/surveys/telemetry/list/SurveyDeploymentListItemDetails.tsx b/app/src/features/surveys/telemetry/list/SurveyDeploymentListItemDetails.tsx index 7c1d130914..5c232a457b 100644 --- a/app/src/features/surveys/telemetry/list/SurveyDeploymentListItemDetails.tsx +++ b/app/src/features/surveys/telemetry/list/SurveyDeploymentListItemDetails.tsx @@ -8,11 +8,11 @@ import { DATE_FORMAT, TIME_FORMAT } from 
'constants/dateTimeFormats'; import dayjs from 'dayjs'; import { useCritterbaseApi } from 'hooks/useCritterbaseApi'; import useDataLoader from 'hooks/useDataLoader'; -import { IAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { TelemetryDeployment } from 'interfaces/useTelemetryDeploymentApi.interface'; import { useEffect } from 'react'; interface ISurveyDeploymentListItemDetailsProps { - deployment: Omit & { frequency_unit: string | null }; + deployment: Omit & { frequency_unit: string | null }; } /** @@ -49,10 +49,21 @@ export const SurveyDeploymentListItemDetails = (props: ISurveyDeploymentListItem const endDateFormatted = endDate ? dayjs(endDate).format(DATE_FORMAT.MediumDateFormat) : null; - if (!startCaptureDataLoader.data) { + if (startCaptureDataLoader.isLoading || !startCaptureDataLoader.isReady) { return ; } + if (!startCaptureDataLoader.data) { + // A Critterbase capture record could not be fetched, or does not exist (which should not happen) + return ( + + + {'Could not load animal capture data.'} + + + ); + } + const startDate = dayjs(startCaptureDataLoader.data.capture_date).format(DATE_FORMAT.MediumDateFormat); const startTime = startCaptureDataLoader.data.capture_time; diff --git a/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManageHeader.tsx b/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManageHeader.tsx new file mode 100644 index 0000000000..c2a9cffa13 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManageHeader.tsx @@ -0,0 +1,50 @@ +import Breadcrumbs from '@mui/material/Breadcrumbs'; +import Link from '@mui/material/Link'; +import Typography from '@mui/material/Typography'; +import PageHeader from 'components/layout/PageHeader'; +import { Link as RouterLink } from 'react-router-dom'; + +export interface DevicesAndDeploymentsManageHeaderProps { + project_id: number; + project_name: string; + survey_id: number; + survey_name: string; +} + +/** + * Header 
for the telemetry device and deployment manage page. + * + * @param {DevicesAndDeploymentsManageHeaderProps} props + * @return {*} + */ +export const DevicesAndDeploymentsManageHeader = (props: DevicesAndDeploymentsManageHeaderProps) => { + const { project_id, project_name, survey_id, survey_name } = props; + + return ( + '}> + + {project_name} + + + {survey_name} + + + Manage Telemetry + + + Manage Devices and Deployments + + + } + /> + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManagePage.tsx b/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManagePage.tsx new file mode 100644 index 0000000000..14d9328320 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/DevicesAndDeploymentsManagePage.tsx @@ -0,0 +1,38 @@ +import Container from '@mui/material/Container'; +import Paper from '@mui/material/Paper'; +import Stack from '@mui/material/Stack'; + +import { DeploymentsContainer } from 'features/surveys/telemetry/manage/deployments/table/DeploymentsContainer'; +import { DevicesContainer } from 'features/surveys/telemetry/manage/devices/table/DevicesContainer'; +import { DevicesAndDeploymentsManageHeader } from 'features/surveys/telemetry/manage/DevicesAndDeploymentsManageHeader'; +import { useProjectContext, useSurveyContext } from 'hooks/useContext'; + +/** + * Page for managing telemetry device and deployment information. 
+ * + * @return {*} + */ +export const DevicesAndDeploymentsManagePage = () => { + const projectContext = useProjectContext(); + const surveyContext = useSurveyContext(); + + return ( + + + + + + + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/deployments/create/CreateDeploymentPage.tsx b/app/src/features/surveys/telemetry/manage/deployments/create/CreateDeploymentPage.tsx similarity index 69% rename from app/src/features/surveys/telemetry/deployments/create/CreateDeploymentPage.tsx rename to app/src/features/surveys/telemetry/manage/deployments/create/CreateDeploymentPage.tsx index 55c2332bc2..e9d89e662e 100644 --- a/app/src/features/surveys/telemetry/deployments/create/CreateDeploymentPage.tsx +++ b/app/src/features/surveys/telemetry/manage/deployments/create/CreateDeploymentPage.tsx @@ -6,12 +6,12 @@ import { DeploymentForm, DeploymentFormInitialValues, DeploymentFormYupSchema -} from 'features/surveys/telemetry/deployments/components/form/DeploymentForm'; -import { DeploymentFormHeader } from 'features/surveys/telemetry/deployments/components/form/DeploymentFormHeader'; +} from 'features/surveys/telemetry/manage/deployments/form/DeploymentForm'; +import { DeploymentFormHeader } from 'features/surveys/telemetry/manage/deployments/form/DeploymentFormHeader'; import { Formik, FormikProps } from 'formik'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { useDialogContext, useProjectContext, useSurveyContext, useTelemetryDataContext } from 'hooks/useContext'; +import { useDialogContext, useProjectContext, useSurveyContext } from 'hooks/useContext'; import { SKIP_CONFIRMATION_DIALOG, useUnsavedChangesDialog } from 'hooks/useUnsavedChangesDialog'; import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; import { useRef, useState } from 'react'; @@ -30,14 +30,12 @@ export const CreateDeploymentPage = () => { const dialogContext = useDialogContext(); const 
projectContext = useProjectContext(); const surveyContext = useSurveyContext(); - const telemetryDataContext = useTelemetryDataContext(); - - const formikRef = useRef>(null); - const [isSubmitting, setIsSubmitting] = useState(false); const { locationChangeInterceptor } = useUnsavedChangesDialog(); - const critters = surveyContext.critterDataLoader.data ?? []; + const formikRef = useRef>(null); + + const [isSubmitting, setIsSubmitting] = useState(false); if (!surveyContext.surveyDataLoader.data || !projectContext.projectDataLoader.data) { return ; @@ -47,30 +45,27 @@ export const CreateDeploymentPage = () => { setIsSubmitting(true); try { - const critter_id = Number(critters?.find((animal) => animal.critter_id === values.critter_id)?.critter_id); - - if (!critter_id) { - throw new Error('Invalid critter data'); - } - - await biohubApi.survey.createDeployment(surveyContext.projectId, surveyContext.surveyId, critter_id, { - device_id: Number(values.device_id), - device_make: values.device_make, - frequency: values.frequency || null, - frequency_unit: values.frequency_unit, - device_model: values.device_model, - critterbase_start_capture_id: values.critterbase_start_capture_id, - critterbase_end_capture_id: values.critterbase_end_capture_id, - critterbase_end_mortality_id: values.critterbase_end_mortality_id, - attachment_end_date: values.attachment_end_date, - attachment_end_time: values.attachment_end_time - }); - - telemetryDataContext.deploymentsDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId); + await biohubApi.telemetryDeployment.createDeployment( + surveyContext.projectId, + surveyContext.surveyId, + values.critter_id, + { + device_id: values.device_id, + frequency: values.frequency, + frequency_unit_id: values.frequency_unit_id, + attachment_start_date: values.attachment_start_date, + attachment_start_time: values.attachment_start_time, + attachment_end_date: values.attachment_end_date, + attachment_end_time: values.attachment_end_time, + 
critterbase_start_capture_id: values.critterbase_start_capture_id, + critterbase_end_capture_id: values.critterbase_end_capture_id, + critterbase_end_mortality_id: values.critterbase_end_mortality_id + } + ); // create complete, navigate back to telemetry page history.push( - `/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry`, + `/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry/manage`, SKIP_CONFIRMATION_DIALOG ); } catch (error) { @@ -110,7 +105,7 @@ export const CreateDeploymentPage = () => { survey_name={surveyContext.surveyDataLoader.data.surveyData.survey_details.survey_name} is_submitting={isSubmitting} title="Add Deployment" - breadcrumb="Add Deployments" + breadcrumb="Add Deployment" /> diff --git a/app/src/features/surveys/telemetry/deployments/edit/EditDeploymentPage.tsx b/app/src/features/surveys/telemetry/manage/deployments/edit/EditDeploymentPage.tsx similarity index 64% rename from app/src/features/surveys/telemetry/deployments/edit/EditDeploymentPage.tsx rename to app/src/features/surveys/telemetry/manage/deployments/edit/EditDeploymentPage.tsx index 68440c4ee3..91e691541d 100644 --- a/app/src/features/surveys/telemetry/deployments/edit/EditDeploymentPage.tsx +++ b/app/src/features/surveys/telemetry/manage/deployments/edit/EditDeploymentPage.tsx @@ -5,12 +5,12 @@ import { EditAnimalDeploymentI18N } from 'constants/i18n'; import { DeploymentForm, DeploymentFormYupSchema -} from 'features/surveys/telemetry/deployments/components/form/DeploymentForm'; -import { DeploymentFormHeader } from 'features/surveys/telemetry/deployments/components/form/DeploymentFormHeader'; +} from 'features/surveys/telemetry/manage/deployments/form/DeploymentForm'; +import { DeploymentFormHeader } from 'features/surveys/telemetry/manage/deployments/form/DeploymentFormHeader'; import { Formik, FormikProps } from 'formik'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 
'hooks/useBioHubApi'; -import { useDialogContext, useProjectContext, useSurveyContext, useTelemetryDataContext } from 'hooks/useContext'; +import { useDialogContext, useProjectContext, useSurveyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; import { SKIP_CONFIRMATION_DIALOG, useUnsavedChangesDialog } from 'hooks/useUnsavedChangesDialog'; import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; @@ -30,31 +30,29 @@ export const EditDeploymentPage = () => { const dialogContext = useDialogContext(); const projectContext = useProjectContext(); const surveyContext = useSurveyContext(); - const telemetryDataContext = useTelemetryDataContext(); - - const formikRef = useRef>(null); - const [isSubmitting, setIsSubmitting] = useState(false); const { locationChangeInterceptor } = useUnsavedChangesDialog(); const urlParams: Record = useParams(); const deploymentId: number | undefined = Number(urlParams['deployment_id']); - const critters = surveyContext.critterDataLoader.data ?? 
[]; + const formikRef = useRef>(null); - const deploymentDataLoader = useDataLoader(biohubApi.survey.getDeploymentById); + const [isSubmitting, setIsSubmitting] = useState(false); - useEffect(() => { - deploymentDataLoader.load(surveyContext.projectId, surveyContext.surveyId, deploymentId); - }, [deploymentDataLoader, deploymentId, surveyContext.projectId, surveyContext.surveyId]); + const deploymentDataLoader = useDataLoader(() => + biohubApi.telemetryDeployment.getDeploymentById(surveyContext.projectId, surveyContext.surveyId, deploymentId) + ); - if (!surveyContext.surveyDataLoader.data || !projectContext.projectDataLoader.data || !deploymentDataLoader.data) { - return ; - } + useEffect(() => { + if (!deploymentId) { + return; + } - const badDeployment = deploymentDataLoader.data.bad_deployment; + deploymentDataLoader.load(); + }, [deploymentDataLoader, deploymentId, surveyContext.projectId, surveyContext.surveyId]); - if (badDeployment) { + if (!projectContext.projectDataLoader.data || !surveyContext.surveyDataLoader.data || !deploymentDataLoader.data) { return ; } @@ -62,47 +60,44 @@ export const EditDeploymentPage = () => { const deploymentFormInitialValues = { critter_id: deployment.critter_id, - device_id: String(deployment.device_id), + device_id: deployment.device_id, frequency: deployment.frequency, - frequency_unit: deployment.frequency_unit, - device_model: deployment.device_model, - device_make: deployment.device_make, + frequency_unit_id: deployment.frequency_unit_id, + attachment_start_date: deployment.attachment_start_date, + attachment_start_time: deployment.attachment_start_time, + attachment_end_date: deployment.attachment_end_date, + attachment_end_time: deployment.attachment_end_time, critterbase_start_capture_id: deployment.critterbase_start_capture_id, critterbase_end_capture_id: deployment.critterbase_end_capture_id, - critterbase_end_mortality_id: deployment.critterbase_end_mortality_id, - attachment_end_date: 
deployment.attachment_end_date, - attachment_end_time: deployment.attachment_end_time + critterbase_end_mortality_id: deployment.critterbase_end_mortality_id }; const handleSubmit = async (values: ICreateAnimalDeployment) => { setIsSubmitting(true); try { - const critter_id = Number(critters?.find((animal) => animal.critter_id === values.critter_id)?.critter_id); - - if (!critter_id) { - throw new Error('Invalid critter data'); - } - - await biohubApi.survey.updateDeployment(surveyContext.projectId, surveyContext.surveyId, deploymentId, { - critter_id: values.critter_id, - device_id: Number(values.device_id), - device_make: values.device_make, - frequency: values.frequency || null, // nullify if empty string - frequency_unit: values.frequency_unit, - device_model: values.device_model, - critterbase_start_capture_id: values.critterbase_start_capture_id, - critterbase_end_capture_id: values.critterbase_end_capture_id, - critterbase_end_mortality_id: values.critterbase_end_mortality_id, - attachment_end_date: values.attachment_end_date, - attachment_end_time: values.attachment_end_time - }); - - telemetryDataContext.deploymentsDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId); + await biohubApi.telemetryDeployment.updateDeployment( + surveyContext.projectId, + surveyContext.surveyId, + deploymentId, + { + critter_id: values.critter_id, + device_id: values.device_id, + frequency: values.frequency, + frequency_unit_id: values.frequency_unit_id, + attachment_start_date: values.attachment_start_date, + attachment_start_time: values.attachment_start_time, + attachment_end_date: values.attachment_end_date, + attachment_end_time: values.attachment_end_time, + critterbase_start_capture_id: values.critterbase_start_capture_id, + critterbase_end_capture_id: values.critterbase_end_capture_id, + critterbase_end_mortality_id: values.critterbase_end_mortality_id + } + ); // edit complete, navigate back to telemetry page history.push( - 
`/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry`, + `/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry/manage`, SKIP_CONFIRMATION_DIALOG ); } catch (error) { @@ -146,7 +141,7 @@ export const EditDeploymentPage = () => { breadcrumb="Edit Deployment" /> - + diff --git a/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentForm.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentForm.tsx new file mode 100644 index 0000000000..e18643f9aa --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentForm.tsx @@ -0,0 +1,190 @@ +import LoadingButton from '@mui/lab/LoadingButton/LoadingButton'; +import Button from '@mui/material/Button'; +import Container from '@mui/material/Container'; +import Divider from '@mui/material/Divider'; +import Paper from '@mui/material/Paper'; +import Stack from '@mui/material/Stack'; +import Typography from '@mui/material/Typography'; +import HorizontalSplitFormComponent from 'components/fields/HorizontalSplitFormComponent'; +import { + DeploymentDetailsForm, + DeploymentDetailsFormInitialValues, + DeploymentDetailsFormYupSchema +} from 'features/surveys/telemetry/manage/deployments/form/deployment-details/DeploymentDetailsForm'; +import { useFormikContext } from 'formik'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useCodesContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { useEffect } from 'react'; +import { useHistory } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { + DeploymentEndForm, + DeploymentEndFormInitialValues, + DeploymentEndFormYupSchema +} from './timeline/DeploymentEndForm'; +import { + DeploymentStartForm, + DeploymentStartFormInitialValues, + DeploymentStartFormYupSchema +} from 
'./timeline/DeploymentStartForm'; + +export const DeploymentFormInitialValues = { + ...DeploymentDetailsFormInitialValues, + ...DeploymentStartFormInitialValues, + ...DeploymentEndFormInitialValues +}; + +export const DeploymentFormYupSchema = + DeploymentDetailsFormYupSchema.concat(DeploymentStartFormYupSchema).concat(DeploymentEndFormYupSchema); + +interface IDeploymentFormProps { + isSubmitting: boolean; +} + +/** + * Deployment form component. + * + * @param {IDeploymentFormProps} props + * @return {*} + */ +export const DeploymentForm = (props: IDeploymentFormProps) => { + const { isSubmitting } = props; + + const { submitForm, values } = useFormikContext(); + + const codesContext = useCodesContext(); + const surveyContext = useSurveyContext(); + + const biohubApi = useBiohubApi(); + + const history = useHistory(); + + // Fetch all devices for the survey + const devicesDataLoader = useDataLoader(() => + biohubApi.telemetryDevice.getDevicesInSurvey(surveyContext.projectId, surveyContext.surveyId) + ); + + // Fetch all critters for the survey + const crittersDataLoader = useDataLoader(() => + biohubApi.survey.getSurveyCritters(surveyContext.projectId, surveyContext.surveyId) + ); + + useEffect(() => { + codesContext.codesDataLoader.load(); + devicesDataLoader.load(); + crittersDataLoader.load(); + }, [ + codesContext.codesDataLoader, + crittersDataLoader, + devicesDataLoader, + surveyContext.projectId, + surveyContext.surveyId + ]); + + // Fetch a single critter's data + const critterDataLoader = useDataLoader((critterId: number) => + biohubApi.survey.getCritterById(surveyContext.projectId, surveyContext.surveyId, critterId) + ); + + // Fetch individual critter data when critter_id changes (ie. 
when the user selects a critter) + useEffect(() => { + if (values.critter_id) { + critterDataLoader.refresh(values.critter_id); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [values.critter_id]); + + return ( + + + + + Enter information about the device and animal. + + You must  + + add the device + +  and  + + animal + +  to your Survey before associating the two in a deployment. + + + }> + ({ + label: frequencyUnit.name, + value: frequencyUnit.id + })) ?? [] + } + /> + + + + + + + + + + + + + + + + + + { + submitForm(); + }}> + Save and Exit + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentFormHeader.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentFormHeader.tsx new file mode 100644 index 0000000000..e3bffcc7ea --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/form/DeploymentFormHeader.tsx @@ -0,0 +1,110 @@ +import { LoadingButton } from '@mui/lab'; +import Breadcrumbs from '@mui/material/Breadcrumbs'; +import Button from '@mui/material/Button'; +import { grey } from '@mui/material/colors'; +import Container from '@mui/material/Container'; +import Link from '@mui/material/Link'; +import Paper from '@mui/material/Paper'; +import Stack from '@mui/material/Stack'; +import Typography from '@mui/material/Typography'; +import { useFormikContext } from 'formik'; +import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { useHistory } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; + +export interface IDeploymentFormHeaderProps { + project_id: number; + project_name: string; + survey_id: number; + survey_name: string; + is_submitting: boolean; + title: string; + breadcrumb: string; +} + +/** + * Renders the header of the create and edit deployment pages. 
+ * + * @param {IDeploymentFormHeaderProps} props + * @return {*} + */ +export const DeploymentFormHeader = (props: IDeploymentFormHeaderProps) => { + const history = useHistory(); + const formikProps = useFormikContext(); + + const { project_id, survey_id, survey_name, project_name, is_submitting, title, breadcrumb } = props; + + return ( + + + + + {project_name} + + + {survey_name} + + + Manage Telemetry + + + Manage Devices and Deployments + + + {breadcrumb} + + + + + {title} + + + { + formikProps.submitForm(); + }}> + Save and Exit + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/deployments/form/deployment-details/DeploymentDetailsForm.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/deployment-details/DeploymentDetailsForm.tsx new file mode 100644 index 0000000000..091b7265d2 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/form/deployment-details/DeploymentDetailsForm.tsx @@ -0,0 +1,131 @@ +import Grid from '@mui/material/Grid'; +import Stack from '@mui/material/Stack'; +import TextField from '@mui/material/TextField'; +import { AnimalAutocompleteField } from 'components/fields/AnimalAutocompleteField'; +import AutocompleteField, { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; +import { DeviceAutocompleteField } from 'components/fields/DeviceAutocompleteField'; +import { useFormikContext } from 'formik'; +import { ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; +import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { TelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import get from 'lodash-es/get'; +import { numberOrNull } from 'utils/string-utils'; +import { isDefined } from 'utils/Utils'; +import yup from 'utils/YupSchema'; + +export const DeploymentDetailsFormInitialValues: yup.InferType = { + device_id: null as unknown as number, + critter_id: null as unknown as number, + frequency: null as 
unknown as number, + frequency_unit_id: null as unknown as number +}; + +export const DeploymentDetailsFormYupSchema = yup.object({ + device_id: yup.number().nullable().required('You must enter the device ID. This is typically the serial number'), + critter_id: yup.number().nullable().required('You must select the animal that the device is associated to'), + frequency: yup.lazy(() => + yup + .number() + .nullable() + .default(null) + .when('frequency_unit_id', { + is: (frequency_unit_id: number) => isDefined(frequency_unit_id), // when frequency_unit_id is defined + then: yup.number().nullable().default(null).required('Frequency is required') + }) + ), + frequency_unit_id: yup.lazy(() => + yup + .number() + .nullable() + .default(null) + .when('frequency', { + is: (frequency: number) => isDefined(frequency), // when frequency is defined + then: yup.number().nullable().default(null).required('Frequency unit is required') + }) + ) +}); + +interface IDeploymentDetailsFormProps { + surveyAnimals: ICritterSimpleResponse[]; + surveyDevices: TelemetryDevice[]; + frequencyUnits: IAutocompleteFieldOption[]; +} + +/** + * Deployment form - deployment details section. 
+ * + * @param {IDeploymentDetailsFormProps} props + * @return {*} + */ +export const DeploymentDetailsForm = (props: IDeploymentDetailsFormProps) => { + const { surveyAnimals, surveyDevices, frequencyUnits } = props; + + const { setFieldValue, values, touched, errors, handleBlur } = useFormikContext(); + + return ( + + + device.device_id === values.device_id)} + required + clearOnSelect + onSelect={(device: TelemetryDevice) => { + if (device) { + setFieldValue('device_id', device.device_id); + } + }} + /> + + + animal.critter_id === values.critter_id)} + required + clearOnSelect + onSelect={(animal: ICritterSimpleResponse) => { + if (animal) { + setFieldValue('critter_id', animal.critter_id); + } + }} + /> + + + + { + // Ensure that the formik value is set to null if the input is empty, and not empty-string + setFieldValue('frequency', numberOrNull(event.target.value)); + }} + onBlur={handleBlur} + variant="outlined" + value={get(values, 'frequency') || ''} // Ensure that the value is an empty string if it is null (controlled component) + error={get(touched, 'frequency') && Boolean(get(errors, 'frequency'))} + helperText={get(touched, 'frequency') && get(errors, 'frequency')} + sx={{ + flex: 0.8, + '& .MuiOutlinedInput-root': { borderTopRightRadius: 0, borderBottomRightRadius: 0 } + }} + /> + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentEndForm.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentEndForm.tsx new file mode 100644 index 0000000000..32860d490d --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentEndForm.tsx @@ -0,0 +1,235 @@ +import Box from '@mui/material/Box'; +import Collapse from '@mui/material/Collapse'; +import FormControlLabel from '@mui/material/FormControlLabel'; +import Grid from '@mui/material/Grid'; +import Radio from '@mui/material/Radio'; +import RadioGroup from '@mui/material/RadioGroup'; +import 
Stack from '@mui/material/Stack'; +import Typography from '@mui/material/Typography'; +import AutocompleteField from 'components/fields/AutocompleteField'; +import { DateField } from 'components/fields/DateField'; +import { TimeField } from 'components/fields/TimeField'; +import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import dayjs from 'dayjs'; +import { useFormikContext } from 'formik'; +import { useSurveyContext } from 'hooks/useContext'; +import { ICaptureResponse, IMortalityResponse } from 'interfaces/useCritterApi.interface'; +import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { useMemo, useState } from 'react'; +import { Link as RouterLink } from 'react-router-dom'; +import { TransitionGroup } from 'react-transition-group'; +import yup from 'utils/YupSchema'; + +// Types to know how the deployment ended, determining which form components to display +type DeploymentEndType = 'capture' | 'mortality' | 'fell_off'; + +export const DeploymentEndFormInitialValues: yup.InferType = { + attachment_end_date: null, + attachment_end_time: null, + critterbase_end_mortality_id: null, + critterbase_end_capture_id: null +}; + +export const DeploymentEndFormYupSchema = yup.object({ + attachment_end_date: yup.lazy(() => + yup + .string() + .nullable() + .default(null) + .when('attachment_end_time', { + is: (attachment_end_time: string | null) => attachment_end_time !== null, + then: yup.string().nullable().required('End date is required'), + otherwise: yup.string().nullable().default(null) + }) + ), + attachment_end_time: yup.string().nullable().default(null), + critterbase_end_mortality_id: yup.string().uuid().nullable().default(null), + critterbase_end_capture_id: yup.string().uuid().nullable().default(null) +}); + +interface IDeploymentEndFormProps { + captures: ICaptureResponse[]; + mortalities: IMortalityResponse[]; +} + +/** + * Deployment form - end of deployment details + * + * @param {IDeploymentEndFormProps} props + * 
@return {*} + */ +export const DeploymentEndForm = (props: IDeploymentEndFormProps) => { + const { captures, mortalities } = props; + + const formikProps = useFormikContext(); + + const { values, setFieldValue } = formikProps; + + // Determine the initial deployment end type based on the form values + const initialDeploymentEndType = useMemo(() => { + if (values.critterbase_end_mortality_id) { + return 'mortality'; + } else if (values.critterbase_end_capture_id) { + return 'capture'; + } else if (values.attachment_end_date) { + return 'fell_off'; + } else { + return null; + } + }, [values]); + + const [deploymentEndType, setDeploymentEndType] = useState(initialDeploymentEndType); + + const surveyContext = useSurveyContext(); + + return ( + + + + If applicable, select how the deployment ended. If due to a mortality, you must  + {values.critter_id ? ( + + report the mortality + + ) : ( + 'report the mortality' + )} +  before removing the device. + + + + } + label="Fell off" + onChange={() => { + setDeploymentEndType('fell_off'); + setFieldValue('critterbase_end_capture_id', null); + setFieldValue('critterbase_end_mortality_id', null); + }} + onClick={() => { + if (deploymentEndType === 'fell_off') { + // if the user clicks on the selected radio button, unselect it + setDeploymentEndType(null); + setFieldValue('attachment_end_date', null); + setFieldValue('attachment_end_time', null); + setFieldValue('critterbase_end_capture_id', null); + setFieldValue('critterbase_end_mortality_id', null); + } + }} + /> + } + label="Capture" + onChange={() => { + setDeploymentEndType('capture'); + setFieldValue('attachment_end_date', null); + setFieldValue('attachment_end_time', null); + setFieldValue('critterbase_end_mortality_id', null); + }} + onClick={() => { + if (deploymentEndType === 'capture') { + // if the user clicks on the selected radio button, unselect it + setDeploymentEndType(null); + setFieldValue('attachment_end_date', null); + setFieldValue('attachment_end_time', 
null); + setFieldValue('critterbase_end_capture_id', null); + setFieldValue('critterbase_end_mortality_id', null); + } + }} + /> + } + label="Mortality" + onChange={() => { + setDeploymentEndType('mortality'); + setFieldValue('attachment_end_date', null); + setFieldValue('attachment_end_time', null); + setFieldValue('critterbase_end_capture_id', null); + }} + onClick={() => { + if (deploymentEndType === 'mortality') { + // if the user clicks on the selected radio button, unselect it + setDeploymentEndType(null); + setFieldValue('attachment_end_date', null); + setFieldValue('attachment_end_time', null); + setFieldValue('critterbase_end_capture_id', null); + setFieldValue('critterbase_end_mortality_id', null); + } + }} + /> + + + + + + {deploymentEndType === 'capture' && ( + { + if (option?.value) { + setFieldValue('critterbase_end_capture_id', option.value); + } + }} + options={captures.map((capture) => ({ + value: capture.capture_id, + label: dayjs(capture.capture_date).format(DATE_FORMAT.LongDateTimeFormat) + }))} + sx={{ width: '100%' }} + /> + )} + {deploymentEndType === 'fell_off' && ( + + + + + )} + {deploymentEndType === 'mortality' && ( + + ({ + value: mortality.mortality_id, + label: dayjs(mortality.mortality_timestamp).format(DATE_FORMAT.LongDateTimeFormat) + }))} + sx={{ width: '100%' }} + /> + + )} + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentStartForm.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentStartForm.tsx new file mode 100644 index 0000000000..86d013bb9a --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentStartForm.tsx @@ -0,0 +1,94 @@ +import Box from '@mui/material/Box'; +import Grid from '@mui/material/Grid'; +import AutocompleteField, { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; +import { DateField } from 'components/fields/DateField'; +import { TimeField } from 
'components/fields/TimeField'; +import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import dayjs from 'dayjs'; +import { useFormikContext } from 'formik'; +import { ICaptureResponse } from 'interfaces/useCritterApi.interface'; +import { ICreateAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { SyntheticEvent } from 'react'; +import yup from 'utils/YupSchema'; + +export const DeploymentStartFormInitialValues: yup.InferType = { + attachment_start_date: null as unknown as string, + attachment_start_time: null, + critterbase_start_capture_id: null as unknown as string +}; + +export const DeploymentStartFormYupSchema = yup.object({ + attachment_start_date: yup.string().nullable().required('Start date is required'), + attachment_start_time: yup.string().nullable().default(null), + + critterbase_start_capture_id: yup.string().nullable().required('You must select the initial capture event') +}); + +interface IDeploymentStartFormProps { + captures: ICaptureResponse[]; +} + +/** + * Deployment form - start of deployment form. + * + * @param {IDeploymentStartFormProps} props + * @return {*} + */ +export const DeploymentStartForm = (props: IDeploymentStartFormProps) => { + const { captures } = props; + + const formikProps = useFormikContext(); + + const { values, setFieldValue } = formikProps; + + return ( + + + ({ + value: capture.capture_id, + label: capture.capture_time + ? 
dayjs(`${capture.capture_date} ${capture.capture_time}`).format(DATE_FORMAT.LongDateTimeFormat) + : dayjs(`${capture.capture_date}`).format(DATE_FORMAT.MediumDateFormat) + }))} + onChange={(_: SyntheticEvent, value: IAutocompleteFieldOption | null) => { + // Get date of the capture to set attachment_start_date + if (value) { + const timestamp = dayjs(value.label); + const date = timestamp.format(DATE_FORMAT.ShortDateFormat); + const time = timestamp.format('HH:mm:ss'); + + setFieldValue('attachment_start_date', date); + setFieldValue('critterbase_start_capture_id', value.value); + // Set capture time if it exists on the selected capture + if (time) { + setFieldValue('attachment_start_time', time); + } + } + }} + required + /> + + + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/deployments/components/form/timeline/DeploymentTimelineForm.tsx b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentTimelineForm.tsx similarity index 88% rename from app/src/features/surveys/telemetry/deployments/components/form/timeline/DeploymentTimelineForm.tsx rename to app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentTimelineForm.tsx index 667ec1691d..37b7bac36a 100644 --- a/app/src/features/surveys/telemetry/deployments/components/form/timeline/DeploymentTimelineForm.tsx +++ b/app/src/features/surveys/telemetry/manage/deployments/form/timeline/DeploymentTimelineForm.tsx @@ -24,37 +24,33 @@ import yup from 'utils/YupSchema'; type DeploymentEndType = 'capture' | 'mortality' | 'fell_off'; export const DeploymentTimelineFormInitialValues: yup.InferType = { + attachment_start_date: null as unknown as string, + attachment_start_time: null, + attachment_end_date: null, + attachment_end_time: null, critterbase_start_capture_id: null as unknown as string, critterbase_end_mortality_id: null, - critterbase_end_capture_id: null, - attachment_end_date: null, - attachment_end_time: null + critterbase_end_capture_id: null }; 
export const DeploymentTimelineFormYupSchema = yup.object({ - critterbase_start_capture_id: yup.string().nullable().required('You must select the initial capture event'), - critterbase_end_mortality_id: yup.string().uuid().nullable(), - critterbase_end_capture_id: yup.string().uuid().nullable(), + attachment_start_date: yup.string().nullable().required('Start date is required'), + attachment_start_time: yup.string().nullable().default(null), attachment_end_date: yup.lazy(() => yup .string() .nullable() + .default(null) .when('attachment_end_time', { is: (attachment_end_time: string | null) => attachment_end_time !== null, - then: yup.string().nullable().required('End Date is required'), - otherwise: yup.string().nullable() + then: yup.string().nullable().required('End date is required'), + otherwise: yup.string().nullable().default(null) }) ), - attachment_end_time: yup.lazy(() => - yup - .string() - .nullable() - .when('attachment_end_date', { - is: (attachment_end_date: string | null) => attachment_end_date !== null, - then: yup.string().nullable().required('End time is required'), - otherwise: yup.string().nullable() - }) - ) + attachment_end_time: yup.string().nullable().default(null), + critterbase_start_capture_id: yup.string().nullable().required('You must select the initial capture event'), + critterbase_end_mortality_id: yup.string().uuid().nullable().default(null), + critterbase_end_capture_id: yup.string().uuid().nullable().default(null) }); interface IDeploymentTimelineFormProps { @@ -96,8 +92,9 @@ export const DeploymentTimelineForm = (props: IDeploymentTimelineFormProps) => { - Start of deployment + Capture event + You must  {values.critter_id ? ( @@ -126,7 +123,27 @@ export const DeploymentTimelineForm = (props: IDeploymentTimelineFormProps) => { /> - + + + Start of deployment + + + + You must specify the start date of the deployment. 
+ + + + + + + + + End of deployment (optional) @@ -195,7 +212,6 @@ export const DeploymentTimelineForm = (props: IDeploymentTimelineFormProps) => { } - disabled={!mortalities.length} label="Mortality" onChange={() => { setDeploymentEndType('mortality'); diff --git a/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsContainer.tsx b/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsContainer.tsx new file mode 100644 index 0000000000..8e5f15acca --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsContainer.tsx @@ -0,0 +1,189 @@ +import { mdiArrowTopRight, mdiDotsVertical, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; +import Icon from '@mdi/react'; +import Box from '@mui/material/Box'; +import Button from '@mui/material/Button'; +import Divider from '@mui/material/Divider'; +import IconButton from '@mui/material/IconButton'; +import ListItemIcon from '@mui/material/ListItemIcon'; +import ListItemText from '@mui/material/ListItemText'; +import Menu from '@mui/material/Menu'; +import MenuItem from '@mui/material/MenuItem'; +import Toolbar from '@mui/material/Toolbar'; +import Typography from '@mui/material/Typography'; +import { GridRowSelectionModel } from '@mui/x-data-grid'; +import { LoadingGuard } from 'components/loading/LoadingGuard'; +import { SkeletonTable } from 'components/loading/SkeletonLoaders'; +import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; +import { FOREIGN_KEY_CONSTRAINT_ERROR } from 'constants/errors'; +import { DeploymentsTable } from 'features/surveys/telemetry/manage/deployments/table/DeploymentsTable'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useDialogContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { useEffect, useState } from 'react'; +import { Link as RouterLink } from 'react-router-dom'; + +export const DeploymentsContainer = () => { + const dialogContext = 
useDialogContext(); + const surveyContext = useSurveyContext(); + + const biohubApi = useBiohubApi(); + + // State for bulk actions + const [headerAnchorEl, setHeaderAnchorEl] = useState(null); + const [selectedRows, setSelectedRows] = useState([]); + + const deploymentsDataLoader = useDataLoader((projectId: number, surveyId: number) => + biohubApi.telemetryDeployment.getDeploymentsInSurvey(projectId, surveyId) + ); + + useEffect(() => { + deploymentsDataLoader.load(surveyContext.projectId, surveyContext.surveyId); + }, [deploymentsDataLoader, surveyContext.projectId, surveyContext.surveyId]); + + const deployments = deploymentsDataLoader.data?.deployments ?? []; + const deploymentsCount = deploymentsDataLoader.data?.count ?? 0; + + // Handler for bulk delete operation + const handleBulkDelete = async () => { + try { + await biohubApi.telemetryDeployment.deleteDeployments( + surveyContext.projectId, + surveyContext.surveyId, + selectedRows.map((id) => Number(id)) + ); + dialogContext.setYesNoDialog({ open: false }); // Close confirmation dialog + setSelectedRows([]); // Clear selection + onDelete(); // Refresh data + } catch (error) { + dialogContext.setYesNoDialog({ open: false }); // Close confirmation dialog on error + setSelectedRows([]); // Clear selection + // Show snackbar with error message + dialogContext.setSnackbar({ + snackbarMessage: ( + <> + + Error Deleting Deployments + + {String(error).includes(FOREIGN_KEY_CONSTRAINT_ERROR) ? ( + + You must delete telemetry data from these deployments before deleting the deployments. 
+ + ) : ( + + {String(error)} + + )} + + ), + open: true + }); + } + }; + + // Handler for clicking on header menu (bulk actions) + const handleHeaderMenuClick = (event: React.MouseEvent) => { + setHeaderAnchorEl(event.currentTarget); + }; + + // Handler for confirming bulk delete operation + const handlePromptConfirmBulkDelete = () => { + setHeaderAnchorEl(null); // Close header menu + dialogContext.setYesNoDialog({ + dialogTitle: 'Delete Deployments?', + dialogContent: ( + + Are you sure you want to delete the selected deployments? + + ), + yesButtonLabel: 'Delete Deployments', + noButtonLabel: 'Cancel', + yesButtonProps: { color: 'error' }, + onClose: () => dialogContext.setYesNoDialog({ open: false }), + onNo: () => dialogContext.setYesNoDialog({ open: false }), + open: true, + onYes: handleBulkDelete + }); + }; + + const onDelete = () => { + deploymentsDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId); + }; + + return ( + <> + {/* Bulk action menu */} + setHeaderAnchorEl(null)} + anchorEl={headerAnchorEl} + anchorOrigin={{ vertical: 'top', horizontal: 'right' }} + transformOrigin={{ vertical: 'top', horizontal: 'right' }}> + + + + + Delete + + + + + + Deployments ‌ + + ({deploymentsCount}) + + + + + + + + + + + + } + isLoadingFallbackDelay={100}> + + } + isLoadingFallbackDelay={100} + hasNoData={!deploymentsCount} + hasNoDataFallback={ + + } + hasNoDataFallbackDelay={100}> + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsTable.tsx b/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsTable.tsx new file mode 100644 index 0000000000..5ffa0b1acf --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/deployments/table/DeploymentsTable.tsx @@ -0,0 +1,360 @@ +import { mdiDotsVertical, mdiPencilOutline, mdiTrashCanOutline } from '@mdi/js'; +import Icon from '@mdi/react'; +import Box from '@mui/material/Box'; +import blue from '@mui/material/colors/blue'; +import green 
from '@mui/material/colors/green'; +import grey from '@mui/material/colors/grey'; +import IconButton from '@mui/material/IconButton'; +import ListItemIcon from '@mui/material/ListItemIcon'; +import ListItemText from '@mui/material/ListItemText'; +import Menu, { MenuProps } from '@mui/material/Menu'; +import MenuItem from '@mui/material/MenuItem'; +import Tooltip from '@mui/material/Tooltip'; +import Typography from '@mui/material/Typography'; +import { GridColDef, GridRowSelectionModel } from '@mui/x-data-grid'; +import ColouredRectangleChip from 'components/chips/ColouredRectangleChip'; +import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; +import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import { FOREIGN_KEY_CONSTRAINT_ERROR } from 'constants/errors'; +import dayjs from 'dayjs'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useCodesContext, useDialogContext, useSurveyContext } from 'hooks/useContext'; +import { TelemetryDeployment } from 'interfaces/useTelemetryDeploymentApi.interface'; +import { useEffect, useState } from 'react'; +import { Link as RouterLink } from 'react-router-dom'; +import { combineDateTime } from 'utils/datetime'; + +export interface IDeploymentRowData { + id: number; + deployment_id: number; + critter_id: number; + device_id: number; + device_key: string; + frequency: number | null; + frequency_unit_id: number | null; + attachment_start_date: string; + attachment_start_time: string | null; + attachment_end_date: string | null; + attachment_end_time: string | null; + critterbase_start_capture_id: string; + critterbase_end_capture_id: string | null; + critterbase_end_mortality_id: string | null; +} + +interface IDeploymentsTableProps { + deployments: TelemetryDeployment[]; + selectedRows: GridRowSelectionModel; + setSelectedRows: (selection: GridRowSelectionModel) => void; + /** + * Callback fired when a deployment is deleted. 
+ */ + onDelete?: () => void; +} + +/** + * Returns a table of telemetry deployments. + * + * @param {IDeploymentsTableProps} props + * @return {*} + */ +export const DeploymentsTable = (props: IDeploymentsTableProps) => { + const { deployments, selectedRows, setSelectedRows, onDelete } = props; + + const biohubApi = useBiohubApi(); + + const codesContext = useCodesContext(); + const dialogContext = useDialogContext(); + const surveyContext = useSurveyContext(); + + const [actionMenuDeploymentId, setActionMenuDeploymentId] = useState(); + const [actionMenuAnchorEl, setActionMenuAnchorEl] = useState(null); + + useEffect(() => { + codesContext.codesDataLoader.load(); + }, [codesContext.codesDataLoader]); + + const handleCloseActionMenu = () => { + setActionMenuAnchorEl(null); + }; + + const handleDeleteDeployment = async () => { + if (!actionMenuDeploymentId) { + return; + } + + await biohubApi.telemetryDeployment + .deleteDeployment(surveyContext.projectId, surveyContext.surveyId, actionMenuDeploymentId) + .then(() => { + dialogContext.setYesNoDialog({ open: false }); + setActionMenuAnchorEl(null); + onDelete?.(); + }) + .catch((error: any) => { + dialogContext.setYesNoDialog({ open: false }); + setActionMenuAnchorEl(null); + dialogContext.setSnackbar({ + snackbarMessage: ( + <> + + Error Deleting Deployment + + {String(error).includes(FOREIGN_KEY_CONSTRAINT_ERROR) ? ( + + You must delete telemetry data from this deployment before deleting the deployment. + + ) : ( + + {String(error)} + + )} + + ), + open: true + }); + }); + }; + + /** + * Display the delete deployment dialog. 
+ */ + const deleteDeploymentDialog = () => { + dialogContext.setYesNoDialog({ + dialogTitle: 'Delete deployment?', + dialogText: 'Are you sure you want to permanently delete this deployment?', + yesButtonLabel: 'Delete Deployment', + noButtonLabel: 'Cancel', + yesButtonProps: { color: 'error' }, + onClose: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + onNo: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + open: true, + onYes: () => { + handleDeleteDeployment(); + } + }); + }; + + const rows: IDeploymentRowData[] = deployments.map((deployment) => ({ + id: deployment.deployment_id, + deployment_id: deployment.deployment_id, + critter_id: deployment.critter_id, + device_id: deployment.device_id, + device_key: deployment.device_key, + frequency: deployment.frequency, + frequency_unit_id: deployment.frequency_unit_id, + attachment_start_date: deployment.attachment_start_date, + attachment_start_time: deployment.attachment_start_time, + attachment_end_date: deployment.attachment_end_date, + attachment_end_time: deployment.attachment_end_time, + critterbase_start_capture_id: deployment.critterbase_start_capture_id, + critterbase_end_capture_id: deployment.critterbase_end_capture_id, + critterbase_end_mortality_id: deployment.critterbase_end_mortality_id + })); + + const columns: GridColDef[] = [ + { + field: 'deployment_id', + headerName: 'Deployment ID', + description: 'The unique key for the deployment', + width: 85, + minWidth: 85, + renderHeader: (params) => ( + + + ID + + + ), + renderCell: (params) => ( + + {params.row.deployment_id} + + ) + }, + { + field: 'critter_id', + headerName: 'Animal', + description: 'The nickname of the animal that the device is on', + flex: 1, + renderCell: (params) => ( + <> + { + surveyContext.critterDataLoader.data?.find((critter) => critter.critter_id === params.row.critter_id) + ?.animal_id + } + + ) + }, + { + field: 'device_key', + headerName: 'Device', + description: 'The serial number and make of 
the device that is deployed', + flex: 1, + renderCell: (params) => { + const [vendor, serial] = params.row.device_key.split(':'); + return ( + + {serial} + + {vendor} + + + ); + } + }, + { + field: 'frequency', + headerName: 'Frequency', + description: 'The frequency of the device', + flex: 1, + renderCell: (params) => ( + + {params.row.frequency}  + + {codesContext.codesDataLoader.data?.frequency_units.find( + (frequencyUnit) => frequencyUnit.id === params.row.frequency_unit_id + )?.name ?? null} + + + ) + }, + { + field: 'attachment_start_date', + headerName: 'Start', + description: 'The start date of the deployment', + flex: 1, + renderCell: (params) => ( + <> + {params.row.attachment_start_time + ? dayjs(`${params.row.attachment_start_date} ${params.row.attachment_start_time}`).format( + DATE_FORMAT.MediumDateTimeFormat + ) + : dayjs(params.row.attachment_start_date).format(DATE_FORMAT.MediumDateFormat)} + + ) + }, + { + field: 'attachment_end_date', + headerName: 'End', + description: 'The end date of the deployment', + flex: 1, + renderCell: (params) => { + if (!params.row.attachment_end_date) { + return null; + } + + if (params.row.attachment_end_time) { + return dayjs(`${params.row.attachment_end_date} ${params.row.attachment_end_time}`).format( + DATE_FORMAT.MediumDateTimeFormat + ); + } + + return dayjs(params.row.attachment_end_date).format(DATE_FORMAT.MediumDateFormat); + } + }, + { + field: 'status', + headerName: 'Status', + description: 'The status of the deployment, based on whether the end date has passed', + flex: 1, + renderCell: (params) => { + if ( + params.row.attachment_end_date && + dayjs().isBefore(combineDateTime(params.row.attachment_end_date, params.row.attachment_end_time)) + ) { + return ; + } + + return ; + } + }, + { + field: 'actions', + type: 'actions', + sortable: false, + width: 10, + align: 'right', + renderCell: (params) => { + return ( + + { + setActionMenuDeploymentId(params.row.deployment_id); + 
setActionMenuAnchorEl(event.currentTarget); + }}> + + + + ); + } + } + ]; + + return ( + <> + {/* ROW ACTION MENU */} + + + + + + + Edit Details + + + { + handleCloseActionMenu(); + deleteDeploymentDialog(); + }}> + + + + Delete + + + + {/* DATA TABLE */} + 'auto'} + disableColumnMenu + rows={rows} + getRowId={(row: IDeploymentRowData) => row.id} + columns={columns} + rowSelectionModel={selectedRows} + onRowSelectionModelChange={setSelectedRows} + checkboxSelection + initialState={{ + pagination: { + paginationModel: { page: 1, pageSize: 10 } + } + }} + pageSizeOptions={[10, 25, 50]} + /> + + ); +}; diff --git a/app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysButton.tsx b/app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysButton.tsx similarity index 89% rename from app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysButton.tsx rename to app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysButton.tsx index a2c54ff8cf..ddbe4e61e0 100644 --- a/app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysButton.tsx +++ b/app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysButton.tsx @@ -1,8 +1,8 @@ import { mdiKeyVariant } from '@mdi/js'; import Icon from '@mdi/react'; import Button from '@mui/material/Button'; -import { TelemetryDeviceKeysDialog } from 'features/surveys/telemetry/device-keys/TelemetryDeviceKeysDialog'; import { useState } from 'react'; +import { TelemetryDeviceKeysDialog } from './TelemetryDeviceKeysDialog'; export interface ITelemetryDeviceKeysButtonProps { /** diff --git a/app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysDialog.tsx b/app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysDialog.tsx similarity index 98% rename from app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysDialog.tsx rename to app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysDialog.tsx index 
29302327b0..a377683d78 100644 --- a/app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysDialog.tsx +++ b/app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysDialog.tsx @@ -11,7 +11,7 @@ import { AxiosProgressEvent, CancelTokenSource } from 'axios'; import AlertBar from 'components/alert/AlertBar'; import FileUpload from 'components/file-upload/FileUpload'; import { AttachmentTypeFileExtensions } from 'constants/attachments'; -import { TelemetryDeviceKeysList } from 'features/surveys/telemetry/device-keys/TelemetryDeviceKeysList'; +import { TelemetryDeviceKeysList } from 'features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysList'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { useSurveyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; @@ -117,6 +117,7 @@ export const TelemetryDeviceKeysDialog = (props: ITelemetryDeviceKeysDialogProps severity="info" variant="standard" title="Automatic Data Retrievals" + sx={{ mb: 3 }} text={ Telemetry data can be imported manually or, for Vectronic and Lotek devices, retrieved automatically diff --git a/app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysList.tsx b/app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysList.tsx similarity index 100% rename from app/src/features/surveys/telemetry/device-keys/TelemetryDeviceKeysList.tsx rename to app/src/features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysList.tsx diff --git a/app/src/features/surveys/telemetry/manage/devices/create/CreateDevicePage.tsx b/app/src/features/surveys/telemetry/manage/devices/create/CreateDevicePage.tsx new file mode 100644 index 0000000000..d5cdc4c5ec --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/create/CreateDevicePage.tsx @@ -0,0 +1,108 @@ +import Box from '@mui/material/Box'; +import CircularProgress from '@mui/material/CircularProgress'; +import FormikErrorSnackbar from 
'components/alert/FormikErrorSnackbar'; +import { UNIQUE_CONSTRAINT_ERROR } from 'constants/errors'; +import { TelemetryDeviceI18N } from 'constants/i18n'; +import { + DeviceForm, + DeviceFormInitialValues, + DeviceFormYupSchema +} from 'features/surveys/telemetry/manage/devices/form/DeviceForm'; +import { DeviceFormHeader } from 'features/surveys/telemetry/manage/devices/form/DeviceFormHeader'; +import { Formik, FormikProps } from 'formik'; +import { APIError } from 'hooks/api/useAxios'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useDialogContext, useProjectContext, useSurveyContext } from 'hooks/useContext'; +import { SKIP_CONFIRMATION_DIALOG, useUnsavedChangesDialog } from 'hooks/useUnsavedChangesDialog'; +import { CreateTelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { useRef, useState } from 'react'; +import { Prompt, useHistory } from 'react-router'; + +/** + * Renders the Create Device page. + * + * @return {*} + */ +export const CreateDevicePage = () => { + const history = useHistory(); + + const biohubApi = useBiohubApi(); + + const dialogContext = useDialogContext(); + const projectContext = useProjectContext(); + const surveyContext = useSurveyContext(); + + const formikRef = useRef>(null); + const [isSubmitting, setIsSubmitting] = useState(false); + + const { locationChangeInterceptor } = useUnsavedChangesDialog(); + + if (!surveyContext.surveyDataLoader.data || !projectContext.projectDataLoader.data) { + return ; + } + + const handleSubmit = async (values: CreateTelemetryDevice) => { + setIsSubmitting(true); + + try { + await biohubApi.telemetryDevice.createDevice(surveyContext.projectId, surveyContext.surveyId, { + serial: values.serial, + device_make_id: values.device_make_id, + model: values.model, + comment: values.comment + }); + + history.push( + `/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry/manage`, + SKIP_CONFIRMATION_DIALOG + ); + } catch (error) { + 
dialogContext.setErrorDialog({ + dialogTitle: TelemetryDeviceI18N.createErrorTitle, + dialogText: !(error as APIError).message.includes(UNIQUE_CONSTRAINT_ERROR) + ? TelemetryDeviceI18N.createErrorText + : undefined, + dialogError: (error as APIError).message, + dialogErrorDetails: (error as APIError)?.errors, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + }, + open: true + }); + + setIsSubmitting(false); + } + }; + + return ( + <> + + + + + + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/edit/EditDevicePage.tsx b/app/src/features/surveys/telemetry/manage/devices/edit/EditDevicePage.tsx new file mode 100644 index 0000000000..74532750f2 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/edit/EditDevicePage.tsx @@ -0,0 +1,123 @@ +import Box from '@mui/material/Box'; +import CircularProgress from '@mui/material/CircularProgress'; +import FormikErrorSnackbar from 'components/alert/FormikErrorSnackbar'; +import { TelemetryDeviceI18N } from 'constants/i18n'; +import { DeviceForm, DeviceFormYupSchema } from 'features/surveys/telemetry/manage/devices/form/DeviceForm'; +import { DeviceFormHeader } from 'features/surveys/telemetry/manage/devices/form/DeviceFormHeader'; +import { Formik, FormikProps } from 'formik'; +import { APIError } from 'hooks/api/useAxios'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useDialogContext, useProjectContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { SKIP_CONFIRMATION_DIALOG, useUnsavedChangesDialog } from 'hooks/useUnsavedChangesDialog'; +import { UpdateTelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { useEffect, useRef, useState } from 'react'; +import { Prompt, useHistory, useParams } from 'react-router'; + +/** + * Renders the Edit Device page. 
+ * + * @return {*} + */ +export const EditDevicePage = () => { + const history = useHistory(); + + const biohubApi = useBiohubApi(); + + const dialogContext = useDialogContext(); + const projectContext = useProjectContext(); + const surveyContext = useSurveyContext(); + + const formikRef = useRef>(null); + const [isSubmitting, setIsSubmitting] = useState(false); + + const { locationChangeInterceptor } = useUnsavedChangesDialog(); + + const urlParams: Record = useParams(); + const deviceId: number | undefined = Number(urlParams['device_id']); + + const deviceDataLoader = useDataLoader(biohubApi.telemetryDevice.getDeviceById); + + useEffect(() => { + deviceDataLoader.load(surveyContext.projectId, surveyContext.surveyId, deviceId); + }, [deviceDataLoader, deviceId, surveyContext.projectId, surveyContext.surveyId]); + + if (!surveyContext.surveyDataLoader.data || !projectContext.projectDataLoader.data || !deviceDataLoader.data) { + return ; + } + + const device = deviceDataLoader.data.device; + + const deviceFormInitialValues = { + device_id: device.device_id, + serial: device.serial, + device_make_id: device.device_make_id, + model: device.model, + comment: device.comment + }; + + const handleSubmit = async (values: UpdateTelemetryDevice) => { + setIsSubmitting(true); + + try { + await biohubApi.telemetryDevice.updateDevice(surveyContext.projectId, surveyContext.surveyId, deviceId, { + serial: values.serial, + device_make_id: values.device_make_id, + model: values.model, + comment: values.comment + }); + + // telemetryDataContext.devicesDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId); + + history.push( + `/admin/projects/${surveyContext.projectId}/surveys/${surveyContext.surveyId}/telemetry/manage`, + SKIP_CONFIRMATION_DIALOG + ); + } catch (error) { + dialogContext.setErrorDialog({ + dialogTitle: TelemetryDeviceI18N.createErrorTitle, + dialogText: TelemetryDeviceI18N.createErrorText, + dialogError: (error as APIError).message, + 
dialogErrorDetails: (error as APIError)?.errors, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + }, + open: true + }); + + setIsSubmitting(false); + } + }; + + return ( + <> + + + + + + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/form/DeviceForm.tsx b/app/src/features/surveys/telemetry/manage/devices/form/DeviceForm.tsx new file mode 100644 index 0000000000..d49d95cd91 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/form/DeviceForm.tsx @@ -0,0 +1,87 @@ +import LoadingButton from '@mui/lab/LoadingButton/LoadingButton'; +import Button from '@mui/material/Button'; +import Container from '@mui/material/Container'; +import Divider from '@mui/material/Divider'; +import Paper from '@mui/material/Paper'; +import Stack from '@mui/material/Stack'; +import HorizontalSplitFormComponent from 'components/fields/HorizontalSplitFormComponent'; +import { + DeviceDetailsForm, + DeviceDetailsFormInitialValues, + DeviceDetailsFormYupSchema +} from 'features/surveys/telemetry/manage/devices/form/device-details/DeviceDetailsForm'; +import { useFormikContext } from 'formik'; +import { useCodesContext, useSurveyContext } from 'hooks/useContext'; +import { CreateTelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { useHistory } from 'react-router'; + +export const DeviceFormInitialValues = { + ...DeviceDetailsFormInitialValues +}; + +export const DeviceFormYupSchema = DeviceDetailsFormYupSchema; + +interface IDeviceFormProps { + isSubmitting: boolean; +} + +/** + * Device form component. 
+ * + * @param {IDeviceFormProps} props + * @return {*} + */ +export const DeviceForm = (props: IDeviceFormProps) => { + const { isSubmitting } = props; + + const { submitForm } = useFormikContext(); + + const codesContext = useCodesContext(); + const surveyContext = useSurveyContext(); + + const history = useHistory(); + + return ( + + + + + ({ + label: data.name, + value: data.id + })) ?? [] + } + /> + + + + + + { + submitForm(); + }}> + Save and Exit + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/form/DeviceFormHeader.tsx b/app/src/features/surveys/telemetry/manage/devices/form/DeviceFormHeader.tsx new file mode 100644 index 0000000000..42e6886bfe --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/form/DeviceFormHeader.tsx @@ -0,0 +1,110 @@ +import { LoadingButton } from '@mui/lab'; +import Breadcrumbs from '@mui/material/Breadcrumbs'; +import Button from '@mui/material/Button'; +import { grey } from '@mui/material/colors'; +import Container from '@mui/material/Container'; +import Link from '@mui/material/Link'; +import Paper from '@mui/material/Paper'; +import Stack from '@mui/material/Stack'; +import Typography from '@mui/material/Typography'; +import { useFormikContext } from 'formik'; +import { CreateTelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { useHistory } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; + +export interface IDeviceFormHeaderProps { + project_id: number; + project_name: string; + survey_id: number; + survey_name: string; + is_submitting: boolean; + title: string; + breadcrumb: string; +} + +/** + * Renders the header of the create and edit device pages. 
+ * + * @param {IDeviceFormHeaderProps} props + * @return {*} + */ +export const DeviceFormHeader = (props: IDeviceFormHeaderProps) => { + const history = useHistory(); + const formikProps = useFormikContext(); + + const { project_id, survey_id, survey_name, project_name, is_submitting, title, breadcrumb } = props; + + return ( + + + + + {project_name} + + + {survey_name} + + + Manage Telemetry + + + Manage Devices and Deployments + + + {breadcrumb} + + + + + {title} + + + { + formikProps.submitForm(); + }}> + Save and Exit + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/form/device-details/DeviceDetailsForm.tsx b/app/src/features/surveys/telemetry/manage/devices/form/device-details/DeviceDetailsForm.tsx new file mode 100644 index 0000000000..9cd128c27a --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/form/device-details/DeviceDetailsForm.tsx @@ -0,0 +1,60 @@ +import Grid from '@mui/material/Grid'; +import AutocompleteField, { IAutocompleteFieldOption } from 'components/fields/AutocompleteField'; +import CustomTextField from 'components/fields/CustomTextField'; +import yup from 'utils/YupSchema'; + +export const DeviceDetailsFormInitialValues: yup.InferType = { + device_make_id: null as unknown as number, + serial: null as unknown as string, + model: null, + comment: null +}; + +export const DeviceDetailsFormYupSchema = yup.object({ + device_make_id: yup.number().nullable().required('You must enter the device make'), + serial: yup.string().nullable().required('You must enter the device serial number'), + model: yup.string().max(100, 'Cannot exceed 100 characters').nullable().default(null), + comment: yup.string().max(250, 'Cannot exceed 250 characters').nullable().default(null) +}); + +interface IDeviceDetailsFormProps { + deviceMakes: IAutocompleteFieldOption[]; +} + +/** + * Device form - details section. 
+ * + * @param {IDeviceDetailsFormProps} props + * @return {*} + */ +export const DeviceDetailsForm = (props: IDeviceDetailsFormProps) => { + const { deviceMakes } = props; + + return ( + + + + + + + + + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/table/DevicesContainer.tsx b/app/src/features/surveys/telemetry/manage/devices/table/DevicesContainer.tsx new file mode 100644 index 0000000000..517cf5269a --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/table/DevicesContainer.tsx @@ -0,0 +1,194 @@ +import { mdiArrowTopRight, mdiDotsVertical, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; +import Icon from '@mdi/react'; +import Box from '@mui/material/Box'; +import Button from '@mui/material/Button'; +import Divider from '@mui/material/Divider'; +import IconButton from '@mui/material/IconButton'; +import ListItemIcon from '@mui/material/ListItemIcon'; +import ListItemText from '@mui/material/ListItemText'; +import Menu from '@mui/material/Menu'; +import MenuItem from '@mui/material/MenuItem'; +import Stack from '@mui/material/Stack'; +import Toolbar from '@mui/material/Toolbar'; +import Typography from '@mui/material/Typography'; +import { GridRowSelectionModel } from '@mui/x-data-grid'; +import { LoadingGuard } from 'components/loading/LoadingGuard'; +import { SkeletonTable } from 'components/loading/SkeletonLoaders'; +import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; +import { FOREIGN_KEY_CONSTRAINT_ERROR } from 'constants/errors'; +import { DevicesTable } from 'features/surveys/telemetry/manage/devices/table/DevicesTable'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useDialogContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { useEffect, useState } from 'react'; +import { Link as RouterLink } from 'react-router-dom'; +import { TelemetryDeviceKeysButton } from '../../device-keys/TelemetryDeviceKeysButton'; + +export 
const DevicesContainer = () => { + const dialogContext = useDialogContext(); + const surveyContext = useSurveyContext(); + + const biohubApi = useBiohubApi(); + + // State for bulk actions + const [headerAnchorEl, setHeaderAnchorEl] = useState(null); + const [selectedRows, setSelectedRows] = useState([]); + + const devicesDataLoader = useDataLoader((projectId: number, surveyId: number) => + biohubApi.telemetryDevice.getDevicesInSurvey(projectId, surveyId) + ); + + useEffect(() => { + devicesDataLoader.load(surveyContext.projectId, surveyContext.surveyId); + }, [devicesDataLoader, surveyContext.projectId, surveyContext.surveyId]); + + const devices = devicesDataLoader.data?.devices ?? []; + const devicesCount = devicesDataLoader.data?.count ?? 0; + + // Handler for bulk delete operation + const handleBulkDelete = async () => { + try { + await biohubApi.telemetryDevice.deleteDevices( + surveyContext.projectId, + surveyContext.surveyId, + selectedRows.map((id) => Number(id)) + ); + dialogContext.setYesNoDialog({ open: false }); // Close confirmation dialog + setSelectedRows([]); // Clear selection + onDelete(); // Refresh data + } catch (error) { + dialogContext.setYesNoDialog({ open: false }); // Close confirmation dialog on error + setSelectedRows([]); // Clear selection + // Show snackbar with error message + dialogContext.setSnackbar({ + snackbarMessage: ( + <> + + Error Deleting Devices + + {String(error).includes(FOREIGN_KEY_CONSTRAINT_ERROR) ? ( + + You must delete the deployments involving these devices before deleting the devices. 
+ + ) : ( + + {String(error)} + + )} + + ), + open: true + }); + } + }; + + // Handler for clicking on header menu (bulk actions) + const handleHeaderMenuClick = (event: React.MouseEvent) => { + setHeaderAnchorEl(event.currentTarget); + }; + + // Handler for confirming bulk delete operation + const handlePromptConfirmBulkDelete = () => { + setHeaderAnchorEl(null); // Close header menu + dialogContext.setYesNoDialog({ + dialogTitle: 'Delete Devices?', + dialogContent: ( + + Are you sure you want to delete the selected devices? + + ), + yesButtonLabel: 'Delete Devices', + noButtonLabel: 'Cancel', + yesButtonProps: { color: 'error' }, + onClose: () => dialogContext.setYesNoDialog({ open: false }), + onNo: () => dialogContext.setYesNoDialog({ open: false }), + open: true, + onYes: handleBulkDelete + }); + }; + + const onDelete = () => { + devicesDataLoader.refresh(surveyContext.projectId, surveyContext.surveyId); + }; + + return ( + <> + {/* Bulk action menu */} + setHeaderAnchorEl(null)} + anchorEl={headerAnchorEl} + anchorOrigin={{ vertical: 'top', horizontal: 'right' }} + transformOrigin={{ vertical: 'top', horizontal: 'right' }}> + + + + + Delete + + + + + + Devices ‌ + + ({devicesCount}) + + + + + + + + + + + + + + + } + isLoadingFallbackDelay={100}> + + } + isLoadingFallbackDelay={100} + hasNoData={!devicesCount} + hasNoDataFallback={ + + } + hasNoDataFallbackDelay={100}> + + + + + + + ); +}; diff --git a/app/src/features/surveys/telemetry/manage/devices/table/DevicesTable.tsx b/app/src/features/surveys/telemetry/manage/devices/table/DevicesTable.tsx new file mode 100644 index 0000000000..89b3833758 --- /dev/null +++ b/app/src/features/surveys/telemetry/manage/devices/table/DevicesTable.tsx @@ -0,0 +1,271 @@ +import { mdiDotsVertical, mdiPencilOutline, mdiTrashCanOutline } from '@mdi/js'; +import Icon from '@mdi/react'; +import Box from '@mui/material/Box'; +import grey from '@mui/material/colors/grey'; +import IconButton from '@mui/material/IconButton'; +import 
ListItemIcon from '@mui/material/ListItemIcon'; +import ListItemText from '@mui/material/ListItemText'; +import Menu, { MenuProps } from '@mui/material/Menu'; +import MenuItem from '@mui/material/MenuItem'; +import Tooltip from '@mui/material/Tooltip'; +import Typography from '@mui/material/Typography'; +import { GridColDef, GridRowSelectionModel } from '@mui/x-data-grid'; +import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; +import { FOREIGN_KEY_CONSTRAINT_ERROR } from 'constants/errors'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useCodesContext, useDialogContext, useSurveyContext } from 'hooks/useContext'; +import { TelemetryDevice } from 'interfaces/useTelemetryDeviceApi.interface'; +import { useEffect, useState } from 'react'; +import { Link as RouterLink } from 'react-router-dom'; + +export interface IDeviceRowData { + id: number; + device_id: number; + serial: string; + device_make_id: number; + model: string | null; + comment: string | null; +} + +interface IDevicesTableProps { + devices: TelemetryDevice[]; + selectedRows: GridRowSelectionModel; + setSelectedRows: (selection: GridRowSelectionModel) => void; + /** + * Callback fired when a deployment is deleted. + */ + onDelete?: () => void; +} + +/** + * Returns a table of telemetry devices. 
+ * + * @param {IDevicesTableProps} props + * @return {*} + */ +export const DevicesTable = (props: IDevicesTableProps) => { + const { devices, selectedRows, setSelectedRows, onDelete } = props; + + const biohubApi = useBiohubApi(); + + const codesContext = useCodesContext(); + const dialogContext = useDialogContext(); + const surveyContext = useSurveyContext(); + + const [actionMenuDeviceId, setActionMenuDeviceId] = useState(); + const [actionMenuAnchorEl, setActionMenuAnchorEl] = useState(null); + + useEffect(() => { + codesContext.codesDataLoader.load(); + }, [codesContext.codesDataLoader]); + + const handleCloseActionMenu = () => { + setActionMenuAnchorEl(null); + }; + + const handleDeleteDevice = async () => { + if (!actionMenuDeviceId) { + return; + } + + await biohubApi.telemetryDevice + .deleteDevice(surveyContext.projectId, surveyContext.surveyId, actionMenuDeviceId) + .then(() => { + dialogContext.setYesNoDialog({ open: false }); + setActionMenuAnchorEl(null); + onDelete?.(); + }) + .catch((error: any) => { + dialogContext.setYesNoDialog({ open: false }); + setActionMenuAnchorEl(null); + dialogContext.setSnackbar({ + snackbarMessage: ( + <> + + Error Deleting Device + + {String(error).includes(FOREIGN_KEY_CONSTRAINT_ERROR) ? ( + + You must delete the deployments involving this device before deleting the device. + + ) : ( + + {String(error)} + + )} + + ), + open: true + }); + }); + }; + + /** + * Display the delete device dialog. 
+ */ + const deleteDeviceDialog = () => { + dialogContext.setYesNoDialog({ + dialogTitle: 'Delete device?', + dialogText: 'Are you sure you want to permanently delete this device?', + yesButtonLabel: 'Delete Device', + noButtonLabel: 'Cancel', + yesButtonProps: { color: 'error' }, + onClose: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + onNo: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + open: true, + onYes: () => { + handleDeleteDevice(); + } + }); + }; + + const rows: IDeviceRowData[] = devices.map((device) => ({ + id: device.device_id, + device_id: device.device_id, + serial: device.serial, + device_make_id: device.device_make_id, + model: device.model, + comment: device.comment + })); + + const columns: GridColDef[] = [ + { + field: 'device_id', + headerName: 'Device ID', + description: 'The unique key for the device', + width: 85, + minWidth: 85, + renderHeader: (params) => ( + + + ID + + + ), + renderCell: (params) => ( + + {params.row.device_id} + + ) + }, + { + field: 'serial', + headerName: 'Serial Number', + description: 'The serial number of the device', + flex: 1 + }, + { + field: 'device_make_id', + headerName: 'Make', + description: 'The manufacturer of the device', + flex: 1, + renderCell: (params) => ( + + {codesContext.codesDataLoader.data?.telemetry_device_makes.find( + (deviceMake) => deviceMake.id === params.row.device_make_id + )?.name ?? 
null} + + ) + }, + { + field: 'model', + headerName: 'Model', + description: 'The model of the device', + flex: 1 + }, + { + field: 'comment', + headerName: 'Comment', + flex: 1 + }, + { + field: 'actions', + type: 'actions', + sortable: false, + width: 10, + align: 'right', + renderCell: (params) => { + return ( + + { + setActionMenuDeviceId(params.row.device_id); + setActionMenuAnchorEl(event.currentTarget); + }}> + + + + ); + } + } + ]; + + return ( + <> + {/* ROW ACTION MENU */} + + + + + + + Edit Details + + + { + handleCloseActionMenu(); + deleteDeviceDialog(); + }}> + + + + Delete + + + + {/* DATA TABLE */} + 'auto'} + disableColumnMenu + rows={rows} + getRowId={(row: IDeviceRowData) => row.id} + columns={columns} + rowSelectionModel={selectedRows} + onRowSelectionModelChange={setSelectedRows} + checkboxSelection + initialState={{ + pagination: { + paginationModel: { page: 1, pageSize: 10 } + } + }} + pageSizeOptions={[10, 25, 50]} + /> + + ); +}; diff --git a/app/src/features/surveys/telemetry/table/TelemetryTable.tsx b/app/src/features/surveys/telemetry/table/TelemetryTable.tsx index 1f1964690d..6776198298 100644 --- a/app/src/features/surveys/telemetry/table/TelemetryTable.tsx +++ b/app/src/features/surveys/telemetry/table/TelemetryTable.tsx @@ -7,20 +7,18 @@ import { GenericTimeColDef } from 'components/data-grid/GenericGridColumnDefinitions'; import { SkeletonTable } from 'components/loading/SkeletonLoaders'; -import { IManualTelemetryTableRow } from 'contexts/telemetryTableContext'; +import { IManualTelemetryTableRow, MANUAL_TELEMETRY_TYPE } from 'contexts/telemetryTableContext'; import { DeploymentColDef, DeviceColDef, TelemetryTypeColDef } from 'features/surveys/telemetry/table/utils/GridColumnDefinitions'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { useSurveyContext, useTelemetryDataContext, useTelemetryTableContext } from 'hooks/useContext'; +import { useSurveyContext, useTelemetryTableContext } from 'hooks/useContext'; import 
useDataLoader from 'hooks/useDataLoader'; import { IAnimalDeploymentWithCritter } from 'interfaces/useSurveyApi.interface'; import { useEffect, useMemo } from 'react'; -const MANUAL_TELEMETRY_TYPE = 'MANUAL'; - interface IManualTelemetryTableProps { isLoading: boolean; } @@ -29,10 +27,9 @@ export const TelemetryTable = (props: IManualTelemetryTableProps) => { const biohubApi = useBiohubApi(); const surveyContext = useSurveyContext(); - const telemetryDataContext = useTelemetryDataContext(); const telemetryTableContext = useTelemetryTableContext(); - const deploymentDataLoader = telemetryDataContext.deploymentsDataLoader; + const deploymentDataLoader = useDataLoader(biohubApi.telemetryDeployment.getDeploymentsInSurvey); const critterDataLoader = useDataLoader(biohubApi.survey.getSurveyCritters); useEffect(() => { @@ -66,16 +63,19 @@ export const TelemetryTable = (props: IManualTelemetryTableProps) => { return critterDeployments; }, [critterDataLoader.data, deploymentDataLoader.data]); - const columns: GridColDef[] = [ - DeploymentColDef({ critterDeployments, hasError: telemetryTableContext.hasError }), - // TODO: Show animal nickname as a column - DeviceColDef({ critterDeployments }), - GenericDateColDef({ field: 'date', headerName: 'Date', hasError: telemetryTableContext.hasError }), - GenericTimeColDef({ field: 'time', headerName: 'Time', hasError: telemetryTableContext.hasError }), - GenericLatitudeColDef({ field: 'latitude', headerName: 'Latitude', hasError: telemetryTableContext.hasError }), - GenericLongitudeColDef({ field: 'longitude', headerName: 'Longitude', hasError: telemetryTableContext.hasError }), - TelemetryTypeColDef() - ]; + const columns: GridColDef[] = useMemo( + () => [ + DeploymentColDef({ critterDeployments, hasError: telemetryTableContext.hasError }), + // TODO: Show animal nickname as a column + DeviceColDef({ critterDeployments }), + GenericDateColDef({ field: 'date', headerName: 'Date', hasError: telemetryTableContext.hasError }), + 
GenericTimeColDef({ field: 'time', headerName: 'Time', hasError: telemetryTableContext.hasError }), + GenericLatitudeColDef({ field: 'latitude', headerName: 'Latitude', hasError: telemetryTableContext.hasError }), + GenericLongitudeColDef({ field: 'longitude', headerName: 'Longitude', hasError: telemetryTableContext.hasError }), + TelemetryTypeColDef() + ], + [critterDeployments, telemetryTableContext.hasError] + ); return ( { onRowEditStop={(_params, event) => { event.defaultMuiPrevented = true; }} + // Pagination + paginationMode="server" + rowCount={telemetryTableContext.recordCount} + pageSizeOptions={[25, 50, 100]} + paginationModel={telemetryTableContext.paginationModel} + onPaginationModelChange={telemetryTableContext.setPaginationModel} + // Sorting + sortingMode="server" + sortModel={telemetryTableContext.sortModel} + onSortModelChange={telemetryTableContext.setSortModel} // Styling rowHeight={56} localeText={{ noRowsLabel: 'No Records' }} getRowHeight={() => 'auto'} - initialState={{ - pagination: { - paginationModel: { page: 0, pageSize: 25 } - } - }} - pageSizeOptions={[25, 50, 100]} slots={{ loadingOverlay: SkeletonTable }} diff --git a/app/src/features/surveys/telemetry/table/TelemetryTableContainer.tsx b/app/src/features/surveys/telemetry/table/TelemetryTableContainer.tsx index 979783f9c7..5007374ba3 100644 --- a/app/src/features/surveys/telemetry/table/TelemetryTableContainer.tsx +++ b/app/src/features/surveys/telemetry/table/TelemetryTableContainer.tsx @@ -21,13 +21,12 @@ import YesNoDialog from 'components/dialog/YesNoDialog'; import { TelemetryTableI18N } from 'constants/i18n'; import { DialogContext, ISnackbarProps } from 'contexts/dialogContext'; import { SurveyContext } from 'contexts/surveyContext'; +import { TelemetryDeviceKeysButton } from 'features/surveys/telemetry/manage/device-keys/TelemetryDeviceKeysButton'; import { TelemetryTable } from 'features/surveys/telemetry/table/TelemetryTable'; import { APIError } from 'hooks/api/useAxios'; 
import { useBiohubApi } from 'hooks/useBioHubApi'; import { useTelemetryTableContext } from 'hooks/useContext'; import { useContext, useDeferredValue, useState } from 'react'; -import { pluralize as p } from 'utils/Utils'; -import { TelemetryDeviceKeysButton } from '../device-keys/TelemetryDeviceKeysButton'; export const TelemetryTableContainer = () => { const biohubApi = useBiohubApi(); @@ -150,6 +149,8 @@ export const TelemetryTableContainer = () => { variant="contained" color="primary" startIcon={} + // TODO: Disabled while the backend CSV Import code is being refactored (https://apps.nrs.gov.bc.ca/int/jira/browse/SIMSBIOHUB-652) + disabled={true} onClick={() => setShowImportDialog(true)}> Import @@ -274,7 +275,7 @@ export const TelemetryTableContainer = () => { - Delete {p(numSelectedRows, 'Telemetr', 'y', 'ies')} + Delete Telemetry diff --git a/app/src/features/surveys/telemetry/table/utils/GridColumnDefinitions.tsx b/app/src/features/surveys/telemetry/table/utils/GridColumnDefinitions.tsx index a18885cc98..cb9f922009 100644 --- a/app/src/features/surveys/telemetry/table/utils/GridColumnDefinitions.tsx +++ b/app/src/features/surveys/telemetry/table/utils/GridColumnDefinitions.tsx @@ -38,12 +38,12 @@ export const DeploymentColDef = (props: { renderCell: (params) => { const error = props.hasError(params); return ( - + dataGridProps={params} options={props.critterDeployments.map((item) => { return { - label: `${item.critter.animal_id}: ${item.deployment.device_id}`, - value: item.deployment.bctw_deployment_id + label: `${item.deployment.deployment_id}: ${item.critter.animal_id}`, + value: item.deployment.deployment_id }; })} error={error} @@ -54,11 +54,11 @@ export const DeploymentColDef = (props: { const error = props.hasError(params); return ( - + dataGridProps={params} options={props.critterDeployments.map((item) => ({ - label: `${item.critter.animal_id}: ${item.deployment.device_id}`, - value: item.deployment.bctw_deployment_id + label: 
`${item.deployment.deployment_id}: ${item.critter.animal_id}`, + value: item.deployment.deployment_id }))} error={error} /> @@ -71,7 +71,7 @@ export const DeviceColDef = (props: { critterDeployments: IAnimalDeploymentWithCritter[]; }): GridColDef => { return { - field: 'device_id', + field: 'serial', headerName: 'Device', hideable: true, minWidth: 120, @@ -82,8 +82,8 @@ export const DeviceColDef = (props: { { props.critterDeployments.find( - (deployment) => deployment.deployment.bctw_deployment_id === params.row.deployment_id - )?.deployment.device_id + (deployment) => deployment.deployment.deployment_id === params.row.deployment_id + )?.deployment.serial } ) diff --git a/app/src/features/surveys/view/components/data-container/SurveyObservationTabularDataContainer.tsx b/app/src/features/surveys/view/components/data-container/SurveyObservationTabularDataContainer.tsx deleted file mode 100644 index cd5f80950e..0000000000 --- a/app/src/features/surveys/view/components/data-container/SurveyObservationTabularDataContainer.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { mdiChartBar, mdiTallyMark5 } from '@mdi/js'; -import Box from '@mui/material/Box'; -import Divider from '@mui/material/Divider'; -import CustomToggleButtonGroup from 'components/toolbar/CustomToggleButtonGroup'; -import { useState } from 'react'; -import { SurveySpatialObservationTable } from '../../survey-spatial/components/observation/SurveySpatialObservationTable'; -import { SurveyObservationAnalytics } from '../analytics/SurveyObservationAnalytics'; - -export enum SurveyObservationTabularDataContainerViewEnum { - COUNTS = 'COUNTS', - ANALYTICS = 'ANALYTICS' -} - -const SurveyObservationTabularDataContainer = () => { - const [activeView, setActiveView] = useState( - SurveyObservationTabularDataContainerViewEnum.COUNTS - ); - - const views = [ - { label: 'Counts', value: SurveyObservationTabularDataContainerViewEnum.COUNTS, icon: mdiTallyMark5 }, - { label: 'Analytics', value: 
SurveyObservationTabularDataContainerViewEnum.ANALYTICS, icon: mdiChartBar } - ]; - - return ( - <> - - setActiveView(view)} - orientation="horizontal" - /> - - - - {activeView === SurveyObservationTabularDataContainerViewEnum.COUNTS && } - {activeView === SurveyObservationTabularDataContainerViewEnum.ANALYTICS && } - - - ); -}; - -export default SurveyObservationTabularDataContainer; diff --git a/app/src/features/surveys/view/survey-spatial/SurveySpatialContainer.tsx b/app/src/features/surveys/view/survey-spatial/SurveySpatialContainer.tsx index 81ce4855f9..6f4728e5c8 100644 --- a/app/src/features/surveys/view/survey-spatial/SurveySpatialContainer.tsx +++ b/app/src/features/surveys/view/survey-spatial/SurveySpatialContainer.tsx @@ -1,5 +1,4 @@ import { mdiEye, mdiPaw, mdiWifiMarker } from '@mdi/js'; -import { TelemetryDataContextProvider } from 'contexts/telemetryDataContext'; import { SurveySpatialAnimal } from 'features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimal'; import { SurveySpatialObservation } from 'features/surveys/view/survey-spatial/components/observation/SurveySpatialObservation'; import { @@ -7,9 +6,12 @@ import { SurveySpatialToolbar } from 'features/surveys/view/survey-spatial/components/SurveySpatialToolbar'; import { SurveySpatialTelemetry } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetry'; -import { useObservationsContext, useTaxonomyContext } from 'hooks/useContext'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useSurveyContext, useTaxonomyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; import { isEqual } from 'lodash-es'; import { useEffect, useMemo, useState } from 'react'; +import { ApiPaginationRequestOptions } from 'types/misc'; import { useSamplingSiteStaticLayer } from './components/map/useSamplingSiteStaticLayer'; import { useStudyAreaStaticLayer } from './components/map/useStudyAreaStaticLayer'; @@ -21,9 +23,15 @@ import { 
useStudyAreaStaticLayer } from './components/map/useStudyAreaStaticLaye * @returns {JSX.Element} The rendered component. */ export const SurveySpatialContainer = (): JSX.Element => { - const observationsContext = useObservationsContext(); + const surveyContext = useSurveyContext(); const taxonomyContext = useTaxonomyContext(); + const biohubApi = useBiohubApi(); + + const observationsDataLoader = useDataLoader((pagination?: ApiPaginationRequestOptions) => + biohubApi.observation.getObservationRecords(surveyContext.projectId, surveyContext.surveyId, pagination) + ); + const [activeView, setActiveView] = useState(SurveySpatialDatasetViewEnum.OBSERVATIONS); const studyAreaStaticLayer = useStudyAreaStaticLayer(); @@ -34,15 +42,24 @@ export const SurveySpatialContainer = (): JSX.Element => { [samplingSiteStaticLayer, studyAreaStaticLayer] ); + useEffect(() => { + // Load the observations data + observationsDataLoader.load(); + }, [observationsDataLoader]); + // Fetch and cache all taxonomic data required for the observations. 
useEffect(() => { const cacheTaxonomicData = async () => { - if (observationsContext.observationsDataLoader.data) { + if (observationsDataLoader.data) { // Fetch all unique ITIS TSNs from observations to retrieve taxonomic names const taxonomicIds = [ - ...new Set(observationsContext.observationsDataLoader.data.surveyObservations.map((item) => item.itis_tsn)) + ...new Set(observationsDataLoader.data.surveyObservations.map((item) => item.itis_tsn)) ].filter((tsn): tsn is number => tsn !== null); + if (!taxonomicIds.length) { + return; + } + await taxonomyContext.cacheSpeciesTaxonomyByIds(taxonomicIds); } }; @@ -50,7 +67,7 @@ export const SurveySpatialContainer = (): JSX.Element => { cacheTaxonomicData(); // Should not re-run this effect on `taxonomyContext` changes // eslint-disable-next-line react-hooks/exhaustive-deps - }, [observationsContext.observationsDataLoader.data]); + }, [observationsDataLoader.data]); return ( <> @@ -70,9 +87,7 @@ export const SurveySpatialContainer = (): JSX.Element => { )} {isEqual(SurveySpatialDatasetViewEnum.TELEMETRY, activeView) && ( - - - + )} {isEqual(SurveySpatialDatasetViewEnum.ANIMALS, activeView) && } diff --git a/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimal.tsx b/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimal.tsx index c31ad9f9d5..f4176e4ca1 100644 --- a/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimal.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimal.tsx @@ -105,7 +105,7 @@ export const SurveySpatialAnimal = (props: ISurveySpatialAnimalProps) => { {/* Display data table with animal capture details */} - + diff --git a/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimalTable.tsx b/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimalTable.tsx index 5c9061c819..7491bbcbdf 100644 --- 
a/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimalTable.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/animal/SurveySpatialAnimalTable.tsx @@ -1,4 +1,5 @@ import { mdiArrowTopRight } from '@mdi/js'; +import Box from '@mui/material/Box'; import { GridColDef } from '@mui/x-data-grid'; import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; import { LoadingGuard } from 'components/loading/LoadingGuard'; @@ -76,16 +77,22 @@ export const SurveySpatialAnimalTable = (props: ISurveyDataAnimalTableProps) => return ( 0 && (props.isLoading || animalsDataLoader.isLoading || !animalsDataLoader.isReady)} - isLoadingFallback={} + isLoadingFallback={ + + + + } isLoadingFallbackDelay={100} hasNoData={!animals.length || !rows.length} hasNoDataFallback={ - + + + } hasNoDataFallbackDelay={100}> - + ); diff --git a/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationContainer.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationContainer.tsx new file mode 100644 index 0000000000..2661ba95f6 --- /dev/null +++ b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationContainer.tsx @@ -0,0 +1,41 @@ +import { mdiChartBar, mdiTallyMark5 } from '@mdi/js'; +import Box from '@mui/material/Box'; +import Divider from '@mui/material/Divider'; +import CustomToggleButtonGroup from 'components/toolbar/CustomToggleButtonGroup'; +import { SurveyObservationAnalytics } from 'features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics'; +import { SurveySpatialObservationTable } from 'features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationTable'; +import { useState } from 'react'; + +export enum SurveySpatialObservationContainerViewEnum { + COUNTS = 'COUNTS', + ANALYTICS = 'ANALYTICS' +} + +export const SurveySpatialObservationContainer = () => { + const 
[activeView, setActiveView] = useState( + SurveySpatialObservationContainerViewEnum.COUNTS + ); + + const views = [ + { label: 'Counts', value: SurveySpatialObservationContainerViewEnum.COUNTS, icon: mdiTallyMark5 }, + { label: 'Analytics', value: SurveySpatialObservationContainerViewEnum.ANALYTICS, icon: mdiChartBar } + ]; + + return ( + <> + + setActiveView(view)} + orientation="horizontal" + /> + + + + {activeView === SurveySpatialObservationContainerViewEnum.COUNTS && } + {activeView === SurveySpatialObservationContainerViewEnum.ANALYTICS && } + + + ); +}; diff --git a/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationDeployment.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationDeployment.tsx new file mode 100644 index 0000000000..172aa02189 --- /dev/null +++ b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationDeployment.tsx @@ -0,0 +1,202 @@ +import { mdiArrowTopRight } from '@mdi/js'; +import { GridColDef, GridSortModel } from '@mui/x-data-grid'; +import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; +import { LoadingGuard } from 'components/loading/LoadingGuard'; +import { SkeletonTable } from 'components/loading/SkeletonLoaders'; +import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; +import { SurveyContext } from 'contexts/surveyContext'; +import dayjs from 'dayjs'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useTaxonomyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { useContext, useEffect, useState } from 'react'; + +// Set height so the skeleton loader will match table rows +const rowHeight = 52; + +interface IObservationTableRow { + survey_observation_id: number; + itis_tsn: number | null; + itis_scientific_name: string | null; + count: number | null; + survey_sample_site_name: string | null; + survey_sample_method_name: string | 
null; + survey_sample_period_start_datetime: string | null; + observation_date: string; + observation_time: string; + latitude: number | null; + longitude: number | null; +} + +/** + * Component to display observation data in a table with server-side pagination and sorting. + * + * @returns {*} + */ +export const SurveySpatialObservationDeployment = () => { + const biohubApi = useBiohubApi(); + const surveyContext = useContext(SurveyContext); + const taxonomyContext = useTaxonomyContext(); + + const [totalRows, setTotalRows] = useState(0); + const [page, setPage] = useState(0); + const [pageSize, setPageSize] = useState(10); + const [sortModel, setSortModel] = useState([]); + const [rows, setTableData] = useState([]); + const [tableColumns, setTableColumns] = useState[]>([]); + + const paginatedDataLoader = useDataLoader((page: number, limit: number, sort?: string, order?: 'asc' | 'desc') => + biohubApi.observation.getObservationRecords(surveyContext.projectId, surveyContext.surveyId, { + page: page + 1, // This fixes an off-by-one error between the front end and the back end + limit, + sort, + order + }) + ); + + // Page information has changed, fetch more data + useEffect(() => { + if (sortModel.length > 0) { + if (sortModel[0].sort) { + paginatedDataLoader.refresh(page, pageSize, sortModel[0].field, sortModel[0].sort); + } + } else { + paginatedDataLoader.refresh(page, pageSize); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [page, pageSize, sortModel]); + + // Update table data and columns when new data is loaded + useEffect(() => { + if (paginatedDataLoader.data) { + setTotalRows(paginatedDataLoader.data.pagination.total); + + setTableData( + paginatedDataLoader.data.surveyObservations.map((item) => { + return { + survey_observation_id: item.survey_observation_id, + itis_tsn: item.itis_tsn, + itis_scientific_name: + (item.itis_tsn && taxonomyContext.getCachedSpeciesTaxonomyById(item.itis_tsn)?.scientificName) || null, + count: item.count, 
+ survey_sample_site_name: item.survey_sample_site_name, + survey_sample_method_name: item.survey_sample_method_name, + survey_sample_period_start_datetime: item.survey_sample_period_start_datetime, + observation_date: dayjs(item.observation_date).format('YYYY-MM-DD'), + observation_time: dayjs(item.observation_date).format('HH:mm:ss'), + latitude: item.latitude, + longitude: item.longitude + }; + }) + ); + + setTableColumns([ + { + field: 'itis_scientific_name', + headerName: 'Species', + flex: 1, + minWidth: 200, + renderCell: (params) => {params.row.itis_scientific_name} + }, + { + field: 'survey_sample_site_name', + headerName: 'Sample Site', + flex: 1, + minWidth: 200 + }, + { + field: 'survey_sample_method_name', + headerName: 'Sample Method', + flex: 1, + minWidth: 200 + }, + { + field: 'survey_sample_period_start_datetime', + headerName: 'Sample Period', + flex: 1, + minWidth: 200 + }, + { + field: 'count', + headerName: 'Count', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'observation_date', + headerName: 'Date', + maxWidth: 120 + }, + { + field: 'observation_time', + headerName: 'Time', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'latitude', + headerName: 'Lat', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'longitude', + headerName: 'Long', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + } + ]); + } + }, [paginatedDataLoader.data, taxonomyContext]); + + return ( + } + isLoadingFallbackDelay={100} + hasNoData={!rows.length} + hasNoDataFallback={ + + } + hasNoDataFallbackDelay={100}> + { + setPage(model.page); + setPageSize(model.pageSize); + }} + pageSizeOptions={[10, 25, 50]} + paginationMode="server" + sortingMode="server" + sortModel={sortModel} + onSortModelChange={(model) => setSortModel(model)} + loading={paginatedDataLoader.isLoading} + getRowId={(row) => row.survey_observation_id} + columns={tableColumns} + rowSelection={false} + 
autoHeight={false} + checkboxSelection={false} + disableRowSelectionOnClick + disableColumnSelector + disableColumnFilter + disableColumnMenu + disableVirtualization + data-testid="survey-spatial-observation-data-table" + /> + + ); +}; diff --git a/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationTable.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationTable.tsx index ecef0ea336..c6226ae5e1 100644 --- a/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationTable.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/observation/SurveySpatialObservationTable.tsx @@ -34,16 +34,14 @@ interface IObservationTableRow { * @returns {*} */ export const SurveySpatialObservationTable = () => { - const biohubApi = useBiohubApi(); const surveyContext = useContext(SurveyContext); const taxonomyContext = useTaxonomyContext(); - const [totalRows, setTotalRows] = useState(0); + const biohubApi = useBiohubApi(); + const [page, setPage] = useState(0); const [pageSize, setPageSize] = useState(10); const [sortModel, setSortModel] = useState([]); - const [rows, setTableData] = useState([]); - const [tableColumns, setTableColumns] = useState[]>([]); const paginatedDataLoader = useDataLoader((page: number, limit: number, sort?: string, order?: 'asc' | 'desc') => biohubApi.observation.getObservationRecords(surveyContext.projectId, surveyContext.surveyId, { @@ -66,92 +64,87 @@ export const SurveySpatialObservationTable = () => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [page, pageSize, sortModel]); - // Update table data and columns when new data is loaded - useEffect(() => { - if (paginatedDataLoader.data) { - setTotalRows(paginatedDataLoader.data.pagination.total); + const rows = + paginatedDataLoader.data?.surveyObservations.map((item) => { + return { + survey_observation_id: item.survey_observation_id, + itis_tsn: item.itis_tsn, + 
itis_scientific_name: + (item.itis_tsn && taxonomyContext.getCachedSpeciesTaxonomyById(item.itis_tsn)?.scientificName) || null, + count: item.count, + survey_sample_site_name: item.survey_sample_site_name, + survey_sample_method_name: item.survey_sample_method_name, + survey_sample_period_start_datetime: item.survey_sample_period_start_datetime, + observation_date: dayjs(item.observation_date).format('YYYY-MM-DD'), + observation_time: dayjs(item.observation_date).format('HH:mm:ss'), + latitude: item.latitude, + longitude: item.longitude + }; + }) ?? []; - setTableData( - paginatedDataLoader.data.surveyObservations.map((item) => { - return { - survey_observation_id: item.survey_observation_id, - itis_tsn: item.itis_tsn, - itis_scientific_name: - (item.itis_tsn && taxonomyContext.getCachedSpeciesTaxonomyById(item.itis_tsn)?.scientificName) || null, - count: item.count, - survey_sample_site_name: item.survey_sample_site_name, - survey_sample_method_name: item.survey_sample_method_name, - survey_sample_period_start_datetime: item.survey_sample_period_start_datetime, - observation_date: dayjs(item.observation_date).format('YYYY-MM-DD'), - observation_time: dayjs(item.observation_date).format('HH:mm:ss'), - latitude: item.latitude, - longitude: item.longitude - }; - }) - ); + const rowCount = paginatedDataLoader.data?.pagination.total ?? 
0; - setTableColumns([ - { - field: 'itis_scientific_name', - headerName: 'Species', - flex: 1, - minWidth: 200, - renderCell: (params) => {params.row.itis_scientific_name} - }, - { - field: 'survey_sample_site_name', - headerName: 'Sample Site', - flex: 1, - minWidth: 200 - }, - { - field: 'survey_sample_method_name', - headerName: 'Sample Method', - flex: 1, - minWidth: 200 - }, - { - field: 'survey_sample_period_start_datetime', - headerName: 'Sample Period', - flex: 1, - minWidth: 200 - }, - { - field: 'count', - headerName: 'Count', - headerAlign: 'right', - align: 'right', - maxWidth: 100 - }, - { - field: 'observation_date', - headerName: 'Date', - maxWidth: 120 - }, - { - field: 'observation_time', - headerName: 'Time', - headerAlign: 'right', - align: 'right', - maxWidth: 100 - }, - { - field: 'latitude', - headerName: 'Lat', - headerAlign: 'right', - align: 'right', - maxWidth: 100 - }, - { - field: 'longitude', - headerName: 'Long', - headerAlign: 'right', - align: 'right', - maxWidth: 100 - } - ]); + // Define table columns + const columns: GridColDef[] = [ + { + field: 'itis_scientific_name', + headerName: 'Species', + flex: 1, + minWidth: 200, + renderCell: (params) => {params.row.itis_scientific_name} + }, + { + field: 'survey_sample_site_name', + headerName: 'Sample Site', + flex: 1, + minWidth: 200 + }, + { + field: 'survey_sample_method_name', + headerName: 'Sample Method', + flex: 1, + minWidth: 200 + }, + { + field: 'survey_sample_period_start_datetime', + headerName: 'Sample Period', + flex: 1, + minWidth: 200 + }, + { + field: 'count', + headerName: 'Count', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'observation_date', + headerName: 'Date', + maxWidth: 120 + }, + { + field: 'observation_time', + headerName: 'Time', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'latitude', + headerName: 'Lat', + headerAlign: 'right', + align: 'right', + maxWidth: 100 + }, + { + field: 'longitude', 
+ headerName: 'Long', + headerAlign: 'right', + align: 'right', + maxWidth: 100 } - }, [paginatedDataLoader.data, taxonomyContext]); + ]; return ( { hasNoDataFallbackDelay={100}> row.survey_observation_id} + autoHeight={false} + // pagination + paginationMode="server" paginationModel={{ pageSize, page }} + pageSizeOptions={[10, 25, 50]} onPaginationModelChange={(model) => { setPage(model.page); setPageSize(model.pageSize); }} - pageSizeOptions={[10, 25, 50]} - paginationMode="server" + // sorting sortingMode="server" + sortingOrder={['asc', 'desc']} sortModel={sortModel} onSortModelChange={(model) => setSortModel(model)} - loading={paginatedDataLoader.isLoading} - getRowId={(row) => row.survey_observation_id} - columns={tableColumns} - rowSelection={false} - autoHeight={false} + // misc checkboxSelection={false} disableRowSelectionOnClick disableColumnSelector diff --git a/app/src/features/surveys/view/components/analytics/SurveyObservationAnalytics.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics.tsx similarity index 98% rename from app/src/features/surveys/view/components/analytics/SurveyObservationAnalytics.tsx rename to app/src/features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics.tsx index 7ab526e25c..8d856827fd 100644 --- a/app/src/features/surveys/view/components/analytics/SurveyObservationAnalytics.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics.tsx @@ -9,7 +9,7 @@ import ToggleButtonGroup from '@mui/material/ToggleButtonGroup'; import Typography from '@mui/material/Typography'; import { LoadingGuard } from 'components/loading/LoadingGuard'; import { SkeletonTable } from 'components/loading/SkeletonLoaders'; -import { ObservationAnalyticsDataTableContainer } from 'features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer'; +import { 
ObservationAnalyticsDataTableContainer } from 'features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { useSurveyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; diff --git a/app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTable.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTable.tsx similarity index 95% rename from app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTable.tsx rename to app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTable.tsx index 2fed674685..f116daf3b1 100644 --- a/app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTable.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTable.tsx @@ -1,6 +1,6 @@ import { GridColDef, GridColumnVisibilityModel } from '@mui/x-data-grid'; import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; -import { IObservationAnalyticsRow } from 'features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer'; +import { IObservationAnalyticsRow } from 'features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer'; const rowHeight = 50; diff --git a/app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer.tsx similarity index 96% rename from app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer.tsx rename to 
app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer.tsx index e879240333..f065051f56 100644 --- a/app/src/features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer.tsx @@ -4,8 +4,8 @@ import { GridColumnVisibilityModel } from '@mui/x-data-grid'; import { LoadingGuard } from 'components/loading/LoadingGuard'; import { SkeletonTable } from 'components/loading/SkeletonLoaders'; import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; -import { ObservationAnalyticsDataTable } from 'features/surveys/view/components/analytics/components/ObservationAnalyticsDataTable'; -import { IGroupByOption } from 'features/surveys/view/components/analytics/SurveyObservationAnalytics'; +import { ObservationAnalyticsDataTable } from 'features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTable'; +import { IGroupByOption } from 'features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { useSurveyContext, useTaxonomyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; diff --git a/app/src/features/surveys/view/components/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx similarity index 96% rename from app/src/features/surveys/view/components/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx rename to app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx index 38acf9eb17..fb84144fec 100644 --- 
a/app/src/features/surveys/view/components/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationsAnalyticsGridColumnDefinitions.tsx @@ -4,8 +4,8 @@ import { GridColDef } from '@mui/x-data-grid'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; import dayjs from 'dayjs'; import { ScientificNameTypography } from 'features/surveys/animals/components/ScientificNameTypography'; -import { IObservationAnalyticsRow } from 'features/surveys/view/components/analytics/components/ObservationAnalyticsDataTableContainer'; -import { IGroupByOption } from 'features/surveys/view/components/analytics/SurveyObservationAnalytics'; +import { IObservationAnalyticsRow } from 'features/surveys/view/survey-spatial/components/observation/analytics/components/ObservationAnalyticsDataTableContainer'; +import { IGroupByOption } from 'features/surveys/view/survey-spatial/components/observation/analytics/SurveyObservationAnalytics'; import { IGetSampleLocationNonSpatialDetails } from 'interfaces/useSamplingSiteApi.interface'; import { IPartialTaxonomy } from 'interfaces/useTaxonomyApi.interface'; import isEqual from 'lodash-es/isEqual'; diff --git a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialDeploymentTable.tsx b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialDeploymentTable.tsx new file mode 100644 index 0000000000..8fc1f33200 --- /dev/null +++ b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialDeploymentTable.tsx @@ -0,0 +1,240 @@ +import { mdiArrowTopRight } from '@mdi/js'; +import Typography from '@mui/material/Typography'; +import { GridColDef, GridSortModel } from '@mui/x-data-grid'; +import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; +import { LoadingGuard } from 'components/loading/LoadingGuard'; +import { SkeletonTable } from 
'components/loading/SkeletonLoaders'; +import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; +import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import dayjs from 'dayjs'; +import { ScientificNameTypography } from 'features/surveys/animals/components/ScientificNameTypography'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useCodesContext, useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { IAnimalDeploymentWithCritter } from 'interfaces/useSurveyApi.interface'; +import { useEffect, useMemo, useState } from 'react'; + +// Set height so the skeleton loader will match table rows +const rowHeight = 52; + +interface ITelemetryData { + id: number; + critter_id: number | null; + device_id: number; + frequency: number | null; + frequency_unit: string | null; + start_date: string; + end_date: string; + itis_scientific_name: string; +} + +/** + * Component to display deployment data in a table format. + * + * @returns {*} The rendered component. 
+ */ +export const SurveySpatialDeploymentTable = () => { + const codesContext = useCodesContext(); + const surveyContext = useSurveyContext(); + + const biohubApi = useBiohubApi(); + + const [page, setPage] = useState(0); + const [pageSize, setPageSize] = useState(10); + const [sortModel, setSortModel] = useState([]); + + const deploymentsDataLoader = useDataLoader((page: number, limit: number, sort?: string, order?: 'asc' | 'desc') => + biohubApi.telemetryDeployment.getDeploymentsInSurvey(surveyContext.projectId, surveyContext.surveyId, { + page: page + 1, // This fixes an off-by-one error between the front end and the back end + limit, + sort, + order + }) + ); + + // Page information has changed, fetch more data + useEffect(() => { + if (sortModel.length > 0) { + if (sortModel[0].sort) { + deploymentsDataLoader.refresh(page, pageSize, sortModel[0].field, sortModel[0].sort); + } + } else { + deploymentsDataLoader.refresh(page, pageSize); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [page, pageSize, sortModel]); + + const critterDataLoader = useDataLoader(biohubApi.survey.getSurveyCritters); + + useEffect(() => { + critterDataLoader.load(surveyContext.projectId, surveyContext.surveyId); + }, [deploymentsDataLoader, critterDataLoader, surveyContext.projectId, surveyContext.surveyId]); + + /** + * Merges critters with associated deployments + * + * @returns {ICritterDeployment[]} Critter deployments + */ + const critterDeployments: IAnimalDeploymentWithCritter[] = useMemo(() => { + const critterDeployments: IAnimalDeploymentWithCritter[] = []; + + const critters = critterDataLoader.data ?? []; + const deployments = deploymentsDataLoader.data?.deployments ?? 
[]; + + if (!critters.length || !deployments.length) { + return []; + } + + const critterMap = new Map(critters.map((critter) => [critter.critterbase_critter_id, critter])); + + deployments.forEach((deployment) => { + const critter = critterMap.get(String(deployment.critterbase_critter_id)); + if (critter) { + critterDeployments.push({ critter, deployment }); + } + }); + + return critterDeployments; + }, [critterDataLoader.data, deploymentsDataLoader.data]); + + /** + * Memoized calculation of table rows based on critter deployments data. + * Formats dates and combines necessary fields for display. + */ + const rows: ITelemetryData[] = useMemo(() => { + return critterDeployments.map((item) => { + return { + // Critters in this table may use multiple devices across multiple timespans + id: item.deployment.deployment_id, + critter_id: item.critter.critter_id, + animal_id: item.critter.animal_id, + device_id: item.deployment.device_id, + start_date: item.deployment.attachment_start_date + ? dayjs(item.deployment.attachment_start_date).format(DATE_FORMAT.MediumDateFormat) + : '', + end_date: item.deployment.attachment_end_date + ? dayjs(item.deployment.attachment_end_date).format(DATE_FORMAT.MediumDateFormat) + : '', + frequency: item.deployment.frequency ?? null, + frequency_unit: + codesContext.codesDataLoader.data?.frequency_units?.find( + (frequencyUnit) => frequencyUnit.id === item.deployment.frequency_unit_id + )?.name ?? null, + itis_scientific_name: item.critter.itis_scientific_name + }; + }); + }, [codesContext.codesDataLoader.data?.frequency_units, critterDeployments]); + + const rowCount = deploymentsDataLoader.data?.pagination.total ?? 
0; + + // Define table columns + const columns: GridColDef[] = [ + { + field: 'animal_id', + headerName: 'Nickname', + flex: 1 + }, + { + field: 'itis_scientific_name', + headerName: 'Species', + flex: 1, + renderCell: (param) => { + return ( + + ); + } + }, + { + field: 'device_id', + headerName: 'Device ID', + flex: 1 + }, + { + field: 'frequency', + headerName: 'Frequency', + flex: 1, + renderCell: (param) => { + return ( + + {param.row.frequency}  + + {param.row.frequency_unit} + + + ); + } + }, + { + field: 'start_date', + headerName: 'Start Date', + flex: 1 + }, + { + field: 'end_date', + headerName: 'End Date', + flex: 1 + } + ]; + + return ( + } + isLoadingFallbackDelay={100} + hasNoData={!rows.length} + hasNoDataFallback={ + + } + hasNoDataFallbackDelay={100}> + row.id} + autoHeight={false} + // pagination + paginationMode="server" + paginationModel={{ pageSize, page }} + pageSizeOptions={[10, 25, 50]} + onPaginationModelChange={(model) => { + setPage(model.page); + setPageSize(model.pageSize); + }} + // sorting + sortingMode="server" + sortingOrder={['asc', 'desc']} + sortModel={sortModel} + onSortModelChange={(model) => setSortModel(model)} + // misc + checkboxSelection={false} + disableRowSelectionOnClick + disableColumnSelector + disableColumnFilter + disableColumnMenu + disableVirtualization + data-testid="survey-spatial-telemetry-data-table" + /> + + ); +}; diff --git a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetry.tsx b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetry.tsx index db47693eca..ef8cddcc33 100644 --- a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetry.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetry.tsx @@ -1,15 +1,14 @@ import Box from '@mui/material/Box'; import { IStaticLayer, IStaticLayerFeature } from 'components/map/components/StaticLayers'; import { 
SURVEY_MAP_LAYER_COLOURS } from 'constants/colours'; +import { SurveySpatialTelemetryContainer } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryContainer'; import { SurveySpatialTelemetryPopup } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryPopup'; -import { SurveySpatialTelemetryTable } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable'; import SurveyMap from 'features/surveys/view/SurveyMap'; import SurveyMapTooltip from 'features/surveys/view/SurveyMapTooltip'; -import { Position } from 'geojson'; -import { useSurveyContext, useTelemetryDataContext } from 'hooks/useContext'; -import { ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; -import { IAnimalDeployment, ITelemetry } from 'interfaces/useTelemetryApi.interface'; -import { useCallback, useEffect, useMemo } from 'react'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { useEffect, useMemo } from 'react'; interface ISurveySpatialTelemetryProps { /** @@ -25,112 +24,51 @@ interface ISurveySpatialTelemetryProps { */ export const SurveySpatialTelemetry = (props: ISurveySpatialTelemetryProps) => { const surveyContext = useSurveyContext(); - const telemetryDataContext = useTelemetryDataContext(); - const deploymentDataLoader = telemetryDataContext.deploymentsDataLoader; - const telemetryDataLoader = telemetryDataContext.telemetryDataLoader; + const biohubApi = useBiohubApi(); - // Load deployments data - useEffect(() => { - deploymentDataLoader.load(surveyContext.projectId, surveyContext.surveyId); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [surveyContext.projectId, surveyContext.surveyId]); + const telemetrySpatialDataLoader = useDataLoader((projectId: number, surveyId: number) => + biohubApi.telemetry.getTelemetrySpatialForSurvey(projectId, 
surveyId) + ); - // Load telemetry data for all deployments useEffect(() => { - if (!deploymentDataLoader.data?.deployments.length) { - // No deployments data, therefore no telemetry data to load - return; + telemetrySpatialDataLoader.load(surveyContext.projectId, surveyContext.surveyId); + }, [surveyContext.projectId, surveyContext.surveyId, telemetrySpatialDataLoader]); + + const points: IStaticLayerFeature[] = useMemo(() => { + const points: IStaticLayerFeature[] = []; + + for (const item of telemetrySpatialDataLoader.data?.telemetry ?? []) { + if (!item.geometry) { + // Skip invalid points + continue; + } + + points.push({ + id: item.telemetry_id, + key: `telemetry-${item.telemetry_id}`, + geoJSON: { + type: 'Feature', + properties: {}, + geometry: item.geometry + } + }); } - telemetryDataLoader.load( - deploymentDataLoader.data?.deployments.map((deployment) => deployment.bctw_deployment_id) ?? [] - ); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [deploymentDataLoader.data]); - - const isLoading = - deploymentDataLoader.isLoading || - !deploymentDataLoader.isReady || - ((telemetryDataLoader.isLoading || !telemetryDataLoader.isReady) && - !!deploymentDataLoader.data?.deployments.length); - - /** - * Combines telemetry, deployment, and critter data into a single list of telemetry points. - * - * @param {ITelemetry[]} telemetry The telemetry data. - * @param {IAnimalDeployment[]} deployments The deployment data. - * @param {ICritterSimpleResponse[]} critters The critter data. - * @returns {IStaticLayerFeature[]} The combined list of telemetry points. 
- */ - const combineTelemetryData = useCallback( - ( - telemetry: ITelemetry[], - deployments: IAnimalDeployment[], - critters: ICritterSimpleResponse[] - ): IStaticLayerFeature[] => { - return ( - telemetry - ?.filter((telemetry) => telemetry.latitude !== undefined && telemetry.longitude !== undefined) - .reduce( - ( - acc: { - deployment: IAnimalDeployment; - critter: ICritterSimpleResponse; - telemetry: ITelemetry; - }[], - telemetry: ITelemetry - ) => { - const deployment = deployments.find( - (animalDeployment) => animalDeployment.bctw_deployment_id === telemetry.deployment_id - ); - - const critter = critters.find((detailedCritter) => detailedCritter.critter_id === deployment?.critter_id); - - if (critter && deployment) { - acc.push({ deployment, critter, telemetry }); - } + return points; + }, [telemetrySpatialDataLoader.data?.telemetry]); - return acc; - }, - [] - ) - .map(({ telemetry }) => { - return { - id: telemetry.id, - key: `telemetry-id-${telemetry.id}`, - geoJSON: { - type: 'Feature', - properties: {}, - geometry: { - type: 'Point', - coordinates: [telemetry.longitude, telemetry.latitude] as Position - } - } - }; - }) ?? [] - ); - }, - [] - ); - - const telemetryPoints: IStaticLayerFeature[] = useMemo(() => { - const telemetry = telemetryDataLoader.data ?? []; - const deployments = deploymentDataLoader.data?.deployments ?? []; - const critters = surveyContext.critterDataLoader.data ?? []; - - return combineTelemetryData(telemetry, deployments, critters); - }, [combineTelemetryData, surveyContext.critterDataLoader.data, deploymentDataLoader.data, telemetryDataLoader.data]); - - const telemetryLayer: IStaticLayer = { + const layer: IStaticLayer = { layerName: 'Telemetry', layerOptions: { fillColor: SURVEY_MAP_LAYER_COLOURS.TELEMETRY_COLOUR ?? SURVEY_MAP_LAYER_COLOURS.DEFAULT_COLOUR, color: SURVEY_MAP_LAYER_COLOURS.TELEMETRY_COLOUR ?? 
SURVEY_MAP_LAYER_COLOURS.DEFAULT_COLOUR, opacity: 0.75 }, - features: telemetryPoints, - popup: (feature) => , + features: points, + popup: (feature) => { + return ; + }, tooltip: (feature) => }; @@ -138,12 +76,12 @@ export const SurveySpatialTelemetry = (props: ISurveySpatialTelemetryProps) => { <> {/* Display map with telemetry points */} - + {/* Display data table with telemetry details */} - - + + ); diff --git a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryContainer.tsx b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryContainer.tsx new file mode 100644 index 0000000000..5b48c3ab1d --- /dev/null +++ b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryContainer.tsx @@ -0,0 +1,46 @@ +import { mdiCellphoneMarker, mdiMapMarker } from '@mdi/js'; +import Box from '@mui/material/Box'; +import Divider from '@mui/material/Divider'; +import CustomToggleButtonGroup from 'components/toolbar/CustomToggleButtonGroup'; +import { SurveySpatialDeploymentTable } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialDeploymentTable'; +import { SurveySpatialTelemetryTable } from 'features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable'; +import { useState } from 'react'; + +export enum SurveySpatialTelemetryContainerViewEnum { + DEPLOYMENTS = 'DEPLOYMENTS', + TELEMETRY = 'TELEMETRY' +} + +/** + * Renders the container for the survey spatial telemetry table. 
+ * + * @return {*} {JSX.Element} + */ +export const SurveySpatialTelemetryContainer = () => { + const [activeView, setActiveView] = useState( + SurveySpatialTelemetryContainerViewEnum.DEPLOYMENTS + ); + + const views = [ + { label: 'Deployments', value: SurveySpatialTelemetryContainerViewEnum.DEPLOYMENTS, icon: mdiCellphoneMarker }, + { label: 'Telemetry', value: SurveySpatialTelemetryContainerViewEnum.TELEMETRY, icon: mdiMapMarker } + ]; + + return ( + <> + + setActiveView(view)} + orientation="horizontal" + /> + + + + {activeView === SurveySpatialTelemetryContainerViewEnum.DEPLOYMENTS && } + {activeView === SurveySpatialTelemetryContainerViewEnum.TELEMETRY && } + + + ); +}; diff --git a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryPopup.tsx b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryPopup.tsx index b90911ca3d..47c7b28a3e 100644 --- a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryPopup.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryPopup.tsx @@ -1,9 +1,12 @@ import { IStaticLayerFeature } from 'components/map/components/StaticLayers'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import dayjs from 'dayjs'; import { SurveyMapPopup } from 'features/surveys/view/SurveyMapPopup'; -import { useSurveyContext, useTelemetryDataContext } from 'hooks/useContext'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import { useSurveyContext } from 'hooks/useContext'; +import useDataLoader from 'hooks/useDataLoader'; +import { IAllTelemetry } from 'interfaces/useTelemetryApi.interface'; import { Popup } from 'react-leaflet'; +import { getFormattedDate } from 'utils/Utils'; export interface ISurveySpatialTelemetryPopupProps { feature: IStaticLayerFeature; @@ -12,8 +15,6 @@ export interface ISurveySpatialTelemetryPopupProps { /** * Renders a popup for telemetry data on the map. 
* - * TODO: This currently relies on the telemetry, deployment, and critter data loaders to already be loaded. The - * improvement would be to fetch that data when the popup is opened, based on the provided feature ID. * * @param {ISurveySpatialTelemetryPopupProps} props * @return {*} @@ -21,79 +22,47 @@ export interface ISurveySpatialTelemetryPopupProps { export const SurveySpatialTelemetryPopup = (props: ISurveySpatialTelemetryPopupProps) => { const { feature } = props; - const surveyContext = useSurveyContext(); - const telemetryDataContext = useTelemetryDataContext(); - - const deploymentDataLoader = telemetryDataContext.deploymentsDataLoader; - const telemetryDataLoader = telemetryDataContext.telemetryDataLoader; - - const getTelemetryMetadata = () => { - const telemetryId = feature.id; - - const telemetryRecord = telemetryDataLoader.data?.find((telemetry) => telemetry.id === telemetryId); - - if (!telemetryRecord) { - return [{ label: 'Telemetry ID', value: telemetryId }]; - } + const biohubAPi = useBiohubApi(); - const deploymentRecord = deploymentDataLoader.data?.deployments.find( - (deployment) => deployment.bctw_deployment_id === telemetryRecord.deployment_id - ); - - if (!deploymentRecord) { - return [ - { label: 'Telemetry ID', value: telemetryId }, - { - label: 'Location', - value: [telemetryRecord.latitude, telemetryRecord.longitude] - .filter((coord): coord is number => coord !== null) - .map((coord) => coord.toFixed(6)) - .join(', ') - }, - { label: 'Date', value: dayjs(telemetryRecord?.acquisition_date).toISOString() } - ]; - } - - const critterRecord = surveyContext.critterDataLoader.data?.find( - (critter) => critter.critter_id === deploymentRecord.critter_id - ); + const surveyContext = useSurveyContext(); - if (!critterRecord) { - return [ - { label: 'Telemetry ID', value: telemetryId }, - { label: 'Device ID', value: String(deploymentRecord.device_id) }, - { - label: 'Location', - value: [telemetryRecord.latitude, telemetryRecord.longitude] - 
.filter((coord): coord is number => coord !== null) - .map((coord) => coord.toFixed(6)) - .join(', ') - }, - { label: 'Date', value: dayjs(telemetryRecord?.acquisition_date).toISOString() } - ]; - } + const telemetryDataLoader = useDataLoader((telemetryId: string) => + biohubAPi.telemetry.getTelemetryById(surveyContext.projectId, surveyContext.surveyId, telemetryId) + ); + const getTelemetryMetadata = (telemetry: IAllTelemetry) => { return [ - { label: 'Telemetry ID', value: telemetryId }, - { label: 'Device ID', value: String(deploymentRecord.device_id) }, - { label: 'Nickname', value: critterRecord.animal_id ?? '' }, + { label: 'Telemetry ID', value: telemetry.telemetry_id }, + { label: 'Deployment ID', value: String(telemetry.deployment_id) }, + { label: 'Nickname', value: telemetry.critter_id ?? '' }, { label: 'Location', - value: [telemetryRecord?.latitude, telemetryRecord?.longitude] + value: [telemetry?.latitude, telemetry?.longitude] .filter((coord): coord is number => coord !== null) .map((coord) => coord.toFixed(6)) .join(', ') }, - { label: 'Date', value: dayjs(telemetryRecord?.acquisition_date).format(DATE_FORMAT.LongDateTimeFormat) } + { + label: 'Date', + value: getFormattedDate(DATE_FORMAT.LongDateTimeFormat, telemetry.acquisition_date) + } ]; }; return ( - + { + telemetryDataLoader.load(String(feature.id)); + } + }}> diff --git a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable.tsx b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable.tsx index 42e09d47a3..e0adf2e8b0 100644 --- a/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable.tsx +++ b/app/src/features/surveys/view/survey-spatial/components/telemetry/SurveySpatialTelemetryTable.tsx @@ -1,32 +1,26 @@ import { mdiArrowTopRight } from '@mdi/js'; -import Typography from '@mui/material/Typography'; -import { GridColDef } from '@mui/x-data-grid'; +import { GridColDef, 
GridSortModel } from '@mui/x-data-grid'; import { StyledDataGrid } from 'components/data-grid/StyledDataGrid'; import { LoadingGuard } from 'components/loading/LoadingGuard'; import { SkeletonTable } from 'components/loading/SkeletonLoaders'; import { NoDataOverlay } from 'components/overlay/NoDataOverlay'; -import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { SurveyContext } from 'contexts/surveyContext'; -import dayjs from 'dayjs'; -import { ScientificNameTypography } from 'features/surveys/animals/components/ScientificNameTypography'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { useTelemetryDataContext } from 'hooks/useContext'; +import { useSurveyContext } from 'hooks/useContext'; import useDataLoader from 'hooks/useDataLoader'; -import { IAnimalDeploymentWithCritter } from 'interfaces/useSurveyApi.interface'; -import { useContext, useEffect, useMemo } from 'react'; +import { useEffect, useState } from 'react'; // Set height so the skeleton loader will match table rows const rowHeight = 52; interface ITelemetryData { - id: number; - critter_id: number | null; - device_id: number; - frequency: number | null; - frequency_unit: string | null; - // start: string; - end: string; - itis_scientific_name: string; + telemetry_id: string; + deployment_id: number; + critter_id: number; + vendor: string; + serial: string; + acquisition_date: string; + latitude: number | null; + longitude: number | null; } /** @@ -35,136 +29,105 @@ interface ITelemetryData { * @returns {*} The rendered component. 
*/ export const SurveySpatialTelemetryTable = () => { - const surveyContext = useContext(SurveyContext); - const telemetryDataContext = useTelemetryDataContext(); + const surveyContext = useSurveyContext(); const biohubApi = useBiohubApi(); - const critterDataLoader = useDataLoader(biohubApi.survey.getSurveyCritters); - const deploymentDataLoader = telemetryDataContext.deploymentsDataLoader; - const frequencyUnitDataLoader = useDataLoader(() => biohubApi.telemetry.getCodeValues('frequency_unit')); + const [totalRows, setTotalRows] = useState(0); + const [page, setPage] = useState(0); + const [pageSize, setPageSize] = useState(10); + const [sortModel, setSortModel] = useState([]); - useEffect(() => { - deploymentDataLoader.load(surveyContext.projectId, surveyContext.surveyId); - critterDataLoader.load(surveyContext.projectId, surveyContext.surveyId); - frequencyUnitDataLoader.load(); - }, [ - critterDataLoader, - deploymentDataLoader, - frequencyUnitDataLoader, - surveyContext.projectId, - surveyContext.surveyId - ]); - - /** - * Merges critters with associated deployments - * - * @returns {ICritterDeployment[]} Critter deployments - */ - const critterDeployments: IAnimalDeploymentWithCritter[] = useMemo(() => { - const critterDeployments: IAnimalDeploymentWithCritter[] = []; - const critters = critterDataLoader.data ?? []; - const deployments = deploymentDataLoader.data?.deployments ?? 
[]; + const telemetryDataLoader = useDataLoader((page: number, limit: number, sort?: string, order?: 'asc' | 'desc') => + biohubApi.telemetry.getTelemetryForSurvey(surveyContext.projectId, surveyContext.surveyId, { + page: page + 1, // This fixes an off-by-one error between the front end and the back end + limit, + sort, + order + }) + ); - if (!critters.length || !deployments.length) { - return []; + // Page information has changed, fetch more data + useEffect(() => { + if (sortModel.length > 0) { + if (sortModel[0].sort) { + telemetryDataLoader.refresh(page, pageSize, sortModel[0].field, sortModel[0].sort); + } + } else { + telemetryDataLoader.refresh(page, pageSize); } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [page, pageSize, sortModel]); - const critterMap = new Map(critters.map((critter) => [critter.critterbase_critter_id, critter])); - - deployments.forEach((deployment) => { - const critter = critterMap.get(String(deployment.critterbase_critter_id)); - if (critter) { - critterDeployments.push({ critter, deployment }); - } - }); + useEffect(() => { + if (!telemetryDataLoader.data) { + return; + } - return critterDeployments; - }, [critterDataLoader.data, deploymentDataLoader.data]); + setTotalRows(telemetryDataLoader.data.pagination.total); + }, [telemetryDataLoader.data]); - /** - * Memoized calculation of table rows based on critter deployments data. - * Formats dates and combines necessary fields for display. 
- */ - const rows: ITelemetryData[] = useMemo(() => { - return critterDeployments.map((item) => { + const rows: ITelemetryData[] = + telemetryDataLoader.data?.telemetry.map((item) => { return { - // Critters in this table may use multiple devices across multiple timespans - id: item.deployment.deployment_id, - critter_id: item.critter.critter_id, - animal_id: item.critter.animal_id, - device_id: item.deployment.device_id, - // start: dayjs(item.deployment.attachment_start).format(DATE_FORMAT.MediumDateFormat), - end: item.deployment.attachment_end_date - ? dayjs(item.deployment.attachment_end_date).format(DATE_FORMAT.MediumDateFormat) - : '', - frequency: item.deployment.frequency ?? null, - frequency_unit: item.deployment.frequency_unit - ? frequencyUnitDataLoader.data?.find((frequencyCode) => frequencyCode.id === item.deployment.frequency_unit) - ?.code ?? null - : null, - itis_scientific_name: item.critter.itis_scientific_name + telemetry_id: item.telemetry_id, + deployment_id: item.deployment_id, + critter_id: item.critter_id, + vendor: item.vendor, + serial: item.serial, + acquisition_date: item.acquisition_date, + latitude: item.latitude, + longitude: item.longitude }; - }); - }, [critterDeployments, frequencyUnitDataLoader.data]); + }) ?? 
[]; // Define table columns const columns: GridColDef[] = [ { - field: 'animal_id', - headerName: 'Nickname', + field: 'telemetry_id', + headerName: 'Telemetry ID', flex: 1 }, { - field: 'itis_scientific_name', - headerName: 'Species', - flex: 1, - renderCell: (param) => { - return ( - - ); - } + field: 'deployment_id', + headerName: 'Deployment ID', + flex: 1 }, { - field: 'device_id', - headerName: 'Device ID', + field: 'critter_id', + headerName: 'Critter ID', flex: 1 }, { - field: 'frequency', - headerName: 'Frequency', - flex: 1, - renderCell: (param) => { - return ( - - {param.row.frequency}  - - {param.row.frequency_unit} - - - ); - } + field: 'serial', + headerName: 'Device', + flex: 1 }, { - field: 'start', - headerName: 'Start Date', + field: 'vendor', + headerName: 'Vendor', flex: 1 }, { - field: 'end', - headerName: 'End Date', + field: 'acquisition_date', + headerName: 'Date', + flex: 1 + }, + { + field: 'latitude', + headerName: 'Latitude', + flex: 1 + }, + { + field: 'longitude', + headerName: 'Longitude', flex: 1 } ]; return ( } isLoadingFallbackDelay={100} hasNoData={!rows.length} @@ -179,25 +142,36 @@ export const SurveySpatialTelemetryTable = () => { hasNoDataFallbackDelay={100}> row.id} - columns={columns} - initialState={{ - pagination: { - paginationModel: { page: 1, pageSize: 5 } - } - }} - pageSizeOptions={[5]} + rowCount={totalRows} + rowHeight={rowHeight} rowSelection={false} + getRowId={(row) => row.telemetry_id} + autoHeight={false} + // pagination + paginationMode="server" + paginationModel={{ pageSize, page }} + pageSizeOptions={[10, 25, 50]} + onPaginationModelChange={(model) => { + setPage(model.page); + setPageSize(model.pageSize); + }} + // sorting + sortingMode="server" + sortingOrder={['asc', 'desc']} + sortModel={sortModel} + onSortModelChange={(model) => setSortModel(model)} + // misc checkboxSelection={false} disableRowSelectionOnClick disableColumnSelector disableColumnFilter disableColumnMenu disableVirtualization - 
sortingOrder={['asc', 'desc']} data-testid="survey-spatial-telemetry-data-table" /> diff --git a/app/src/hooks/api/useSurveyApi.test.ts b/app/src/hooks/api/useSurveyApi.test.ts index 26c4814064..c30c3fd2cf 100644 --- a/app/src/hooks/api/useSurveyApi.test.ts +++ b/app/src/hooks/api/useSurveyApi.test.ts @@ -9,7 +9,6 @@ import { SurveyBasicFieldsObject } from 'interfaces/useSurveyApi.interface'; import { ApiPaginationResponseParams } from 'types/misc'; -import { v4 } from 'uuid'; import useSurveyApi from './useSurveyApi'; describe('useSurveyApi', () => { @@ -26,7 +25,6 @@ describe('useSurveyApi', () => { const projectId = 1; const surveyId = 1; const critterId = 1; - const deploymentId = 1; describe('createSurvey', () => { it('creates a survey', async () => { @@ -93,85 +91,6 @@ describe('useSurveyApi', () => { }); }); - describe('createDeployment', () => { - it('should add deployment to survey critter', async () => { - mock.onPost(`/api/project/${projectId}/survey/${surveyId}/critters/${critterId}/deployments`).reply(201, 1); - - const result = await useSurveyApi(axios).createDeployment(projectId, surveyId, critterId, { - device_id: 1, - device_make: 22, - device_model: 'E', - frequency: 1, - frequency_unit: 33, - critterbase_start_capture_id: '', - critterbase_end_capture_id: '', - critterbase_end_mortality_id: '', - attachment_end_date: '', - attachment_end_time: '' - }); - - expect(result).toBe(1); - }); - }); - - describe('getDeploymentsInSurvey', () => { - it('should get one deployment', async () => { - const response = { - deployments: [ - { - assignment_id: v4(), - collar_id: v4(), - critterbase_critter_id: v4(), - critter_id: 123, - critterbase_start_capture_id: '', - critterbase_end_capture_id: '', - critterbase_end_mortality_id: '', - attachment_start_date: '', - attachment_start_time: '', - attachment_end_date: '', - attachment_end_time: '', - deployment_id: 123, - bctw_deployment_id: v4(), - device_id: 123, - device_make: 22, - device_model: 'a', - 
frequency: 1, - frequency_unit: 33 - } - ], - bad_deployments: [] - }; - - mock.onGet(`/api/project/${projectId}/survey/${surveyId}/deployments`).reply(200, response); - - const result = await useSurveyApi(axios).getDeploymentsInSurvey(projectId, surveyId); - - expect(result.deployments.length).toBe(1); - expect(result.deployments[0].device_id).toBe(123); - }); - }); - - describe('updateDeployment', () => { - it('should update a deployment', async () => { - mock.onPut(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`).reply(200, 1); - const result = await useSurveyApi(axios).updateDeployment(projectId, surveyId, deploymentId, { - critter_id: 1, - critterbase_start_capture_id: '', - critterbase_end_capture_id: '', - critterbase_end_mortality_id: '', - attachment_end_date: '', - attachment_end_time: '', - frequency: 10.5, - frequency_unit: 44, - device_id: 1, - device_make: 22, - device_model: '' - }); - - expect(result).toBe(1); - }); - }); - describe('getSurveyCritters', () => { it('should get critters', async () => { const response = [ diff --git a/app/src/hooks/api/useSurveyApi.ts b/app/src/hooks/api/useSurveyApi.ts index f7557da36c..e8478dc509 100644 --- a/app/src/hooks/api/useSurveyApi.ts +++ b/app/src/hooks/api/useSurveyApi.ts @@ -5,7 +5,6 @@ import { ISurveyCritter } from 'contexts/animalPageContext'; import { ISurveyAdvancedFilters } from 'features/summary/list-data/survey/SurveysListFilterForm'; import { ICreateCritter } from 'features/surveys/view/survey-animals/animal'; import { SurveyExportConfig } from 'features/surveys/view/survey-export/SurveyExportForm'; -import { WarningSchema } from 'interfaces/useBioHubApi.interface'; import { ICritterDetailedResponse, ICritterSimpleResponse } from 'interfaces/useCritterApi.interface'; import { IGetReportDetails, IUploadAttachmentResponse } from 'interfaces/useProjectApi.interface'; import { @@ -17,11 +16,7 @@ import { IGetSurveyForViewResponse, IUpdateSurveyRequest } from 
'interfaces/useSurveyApi.interface'; -import { - IAllTelemetryPointCollection, - IAnimalDeployment, - ICreateAnimalDeploymentPostData -} from 'interfaces/useTelemetryApi.interface'; +import { IAllTelemetryPointCollection } from 'interfaces/useTelemetryApi.interface'; import qs from 'qs'; import { ApiPaginationRequestOptions } from 'types/misc'; @@ -373,16 +368,21 @@ const useSurveyApi = (axios: AxiosInstance) => { * @param {number} projectId * @param {number} surveyId * @param {number} critterId + * @param {string} expand List of related resources to include in the response * @return {*} {Promise} */ const getCritterById = async ( projectId: number, surveyId: number, - critterId: number + critterId: number, + expand?: ['attachments'] ): Promise => { - const { data } = await axios.get( - `/api/project/${projectId}/survey/${surveyId}/critters/${critterId}?format=detailed` - ); + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/critters/${critterId}`, { + params: { + format: 'detailed', + expand: expand + } + }); return data; }; @@ -438,91 +438,6 @@ const useSurveyApi = (axios: AxiosInstance) => { return data; }; - /** - * Create a new deployment with associated device hardware metadata. Must include critterbase critter id. - * - * @param {number} projectId - * @param {number} surveyId - * @param {number} critterId - * @param {Omit} body - * @return {*} {Promise<{ deploymentId: number }>} - */ - const createDeployment = async ( - projectId: number, - surveyId: number, - critterId: number, - body: Omit - ): Promise<{ deploymentId: number }> => { - const { data } = await axios.post( - `/api/project/${projectId}/survey/${surveyId}/critters/${critterId}/deployments`, - body - ); - return data; - }; - - /** - * Update a deployment with a new time span. 
- * - * @param {number} projectId - * @param {number} surveyId - * @param {number} deploymentId - * @param {ICreateAnimalDeploymentPostData} body - * @return {*} {Promise} - */ - const updateDeployment = async ( - projectId: number, - surveyId: number, - deploymentId: number, - body: ICreateAnimalDeploymentPostData - ): Promise => { - const { data } = await axios.put(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`, body); - return data; - }; - - /** - * Get all deployments associated with the given survey ID. - * - * @param {number} projectId - * @param {number} surveyId - * @return {*} {Promise<{ - * deployments: IAnimalDeployment[]; - * bad_deployments: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }>[]; - * }>} - */ - const getDeploymentsInSurvey = async ( - projectId: number, - surveyId: number - ): Promise<{ - deployments: IAnimalDeployment[]; - bad_deployments: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }>[]; - }> => { - const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/deployments`); - return data; - }; - - /** - * Get deployment by Id, using the integer Id from SIMS instead of the BCTW GUID - * - * @param {number} projectId - * @param {number} surveyId - * @param {number} deploymentId - * @return {*} {(Promise< - * | { deployment: IAnimalDeployment; bad_deployment: null } - * | { deployment: null; bad_deployment: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }> } - * >)} - */ - const getDeploymentById = async ( - projectId: number, - surveyId: number, - deploymentId: number - ): Promise< - | { deployment: IAnimalDeployment; bad_deployment: null } - | { deployment: null; bad_deployment: WarningSchema<{ sims_deployment_id: number; bctw_deployment_id: string }> } - > => { - const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`); - return data; - }; - /** * Get all telemetry 
points for a critter in a survey within a given time span. * @@ -548,56 +463,6 @@ const useSurveyApi = (axios: AxiosInstance) => { return data; }; - /** - * Ends a deployment. Will trigger removal in both SIMS and BCTW. - * - * @param {number} projectId - * @param {number} surveyId - * @param {number} critterId - * @param {number} deploymentId - * @return {*} {Promise} - */ - const endDeployment = async ( - projectId: number, - surveyId: number, - critterId: number, - deploymentId: number - ): Promise => { - const { data } = await axios.delete( - `/api/project/${projectId}/survey/${surveyId}/critters/${critterId}/deployments/${deploymentId}` - ); - return data; - }; - - /** - * Deletes a deployment. Will trigger deletion in SIMS and invalidates the deployment in BCTW. - * - * @param {number} projectId - * @param {number} surveyId - * @param {number} deploymentId - * @return {*} {Promise} - */ - const deleteDeployment = async (projectId: number, surveyId: number, deploymentId: number): Promise => { - const { data } = await axios.delete(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`); - return data; - }; - - /** - * Deletes a list of deployments. Will trigger deletion in SIMS and invalidates the deployments in BCTW. - * - * @param {number} projectId - * @param {number} surveyId - * @param {number[]} deploymentIds - * @return {*} {Promise} - */ - const deleteDeployments = async (projectId: number, surveyId: number, deploymentIds: number[]): Promise => { - const { data } = await axios.post(`/api/project/${projectId}/survey/${surveyId}/deployments/delete`, { - deployment_ids: deploymentIds - }); - - return data; - }; - /** * Bulk upload Critters from CSV. 
* @@ -740,7 +605,6 @@ const useSurveyApi = (axios: AxiosInstance) => { getSurveysBasicFieldsByProjectId, getSurveyForUpdate, findSurveys, - getDeploymentById, updateSurvey, uploadSurveyAttachments, uploadSurveyReports, @@ -753,19 +617,13 @@ const useSurveyApi = (axios: AxiosInstance) => { getSurveyCritters, createCritterAndAddToSurvey, removeCrittersFromSurvey, - createDeployment, getSurveyCrittersDetailed, - getDeploymentsInSurvey, getCritterById, - updateDeployment, getCritterTelemetry, importCrittersFromCsv, importCapturesFromCsv, importMarkingsFromCsv, importMeasurementsFromCsv, - endDeployment, - deleteDeployment, - deleteDeployments, exportData }; }; diff --git a/app/src/hooks/api/useTelemetryApi.test.ts b/app/src/hooks/api/useTelemetryApi.test.ts index 2d518c4b2c..6d8afb8cf5 100644 --- a/app/src/hooks/api/useTelemetryApi.test.ts +++ b/app/src/hooks/api/useTelemetryApi.test.ts @@ -19,16 +19,15 @@ describe('useTelemetryApi', () => { telemetry: [ { telemetry_id: '123', + deployment_id: 3, + critter_id: 2, + vendor: 'lotek', + serial: '12345', acquisition_date: '2021-01-01', latitude: 49.123, longitude: -126.123, - telemetry_type: 'vendor', - device_id: 12345, - bctw_deployment_id: '123-123-123', - critter_id: 2, - deployment_id: 3, - critterbase_critter_id: '345-345-345-', - animal_id: '567234-234' + elevation: 100, + temperature: null } ], pagination: { @@ -46,28 +45,6 @@ describe('useTelemetryApi', () => { expect(result).toEqual(mockResponse); }); - describe('getCodeValues', () => { - it('should return a list of code values', async () => { - const mockCodeValues = { - code_header_title: 'code_header_title', - code_header_name: 'code_header_name', - id: 123, - description: 'description', - long_description: 'long_description' - }; - - mock.onGet('/api/telemetry/code?codeHeader=code_header_name').reply(200, [mockCodeValues]); - const result = await useTelemetryApi(axios).getCodeValues('code_header_name'); - expect(result).toEqual([mockCodeValues]); - }); - - 
it('should catch errors', async () => { - mock.onGet('/api/telemetry/code?codeHeader=code_header_name').reply(500, 'error'); - const result = await useTelemetryApi(axios).getCodeValues('code_header_name'); - expect(result).toEqual([]); - }); - }); - describe('uploadTelemetryDeviceCredentialFile', () => { it('should upload a keyx file', async () => { const projectId = 1; diff --git a/app/src/hooks/api/useTelemetryApi.ts b/app/src/hooks/api/useTelemetryApi.ts index 62d2d9e22b..3e8a2c8747 100644 --- a/app/src/hooks/api/useTelemetryApi.ts +++ b/app/src/hooks/api/useTelemetryApi.ts @@ -2,13 +2,13 @@ import { AxiosInstance, AxiosProgressEvent, CancelTokenSource } from 'axios'; import { IAllTelemetryAdvancedFilters } from 'features/summary/tabular-data/telemetry/TelemetryListFilterForm'; import { IUploadAttachmentResponse } from 'interfaces/useProjectApi.interface'; import { + GetSurveyTelemetryResponse, IAllTelemetry, - ICodeResponse, ICreateManualTelemetry, IFindTelemetryResponse, - IManualTelemetry, IUpdateManualTelemetry, - TelemetryDeviceKeyFile + TelemetryDeviceKeyFile, + TelemetrySpatial } from 'interfaces/useTelemetryApi.interface'; import qs from 'qs'; import { ApiPaginationRequestOptions } from 'types/misc'; @@ -42,53 +42,117 @@ const useTelemetryApi = (axios: AxiosInstance) => { }; /** - * Get list of manual and vendor telemetry by deployment ids + * Get a telemetry record by id. * - * @param {string[]} deploymentIds BCTW deployment ids - * @return {*} {Promise} + * @param {number} projectId + * @param {number} surveyId + * @param {number} telemetryId The telemetry record ID (uuid) + * @return {*} {Promise<{ telemetry: IAllTelemetry }>} + */ + const getTelemetryById = async ( + projectId: number, + surveyId: number, + telemetryId: string + ): Promise<{ telemetry: IAllTelemetry }> => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/telemetry/${telemetryId}`); + + return data; + }; + + /** + * Get all telemetry for a survey. 
+ * + * @param {number} projectId + * @param {number} surveyId + * @param {ApiPaginationRequestOptions} [pagination] + * @return {*} {Promise} */ - const getAllTelemetryByDeploymentIds = async (deploymentIds: string[]): Promise => { - const { data } = await axios.get('/api/telemetry/deployments', { + const getTelemetryForSurvey = async ( + projectId: number, + surveyId: number, + pagination?: ApiPaginationRequestOptions + ): Promise => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/telemetry`, { params: { - bctwDeploymentIds: deploymentIds - } + ...pagination + }, + paramsSerializer: (params) => qs.stringify(params) }); + + return data; + }; + + /** + * Get all telemetry spatial data for a survey. + * + * @param {number} projectId + * @param {number} surveyId + * @return {*} {Promise<{ telemetry: TelemetrySpatial[]; supplementaryData: { count: number } }>} + */ + const getTelemetrySpatialForSurvey = async ( + projectId: number, + surveyId: number + ): Promise<{ telemetry: TelemetrySpatial[]; supplementaryData: { count: number } }> => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/telemetry/spatial`); + return data; }; /** - * Bulk create Manual Telemetry + * Bulk create Manual Telemetry records. * + * @param {number} projectId + * @param {number} surveyIdF * @param {ICreateManualTelemetry[]} manualTelemetry Manual Telemetry create objects - * @return {*} {Promise} + * @return {*} {Promise} */ const createManualTelemetry = async ( + projectId: number, + surveyId: number, manualTelemetry: ICreateManualTelemetry[] - ): Promise => { - const { data } = await axios.post('/api/telemetry/manual', manualTelemetry); - return data; + ): Promise => { + await axios.post(`/api/project/${projectId}/survey/${surveyId}/deployments/telemetry/manual`, { + telemetry: manualTelemetry + }); + + return; }; /** - * Bulk update Manual Telemetry + * Bulk update Manual Telemetry records. 
* + * @param {number} projectId + * @param {number} surveyId * @param {IUpdateManualTelemetry[]} manualTelemetry Manual Telemetry update objects - * @return {*} + * @return {*} {Promise} */ - const updateManualTelemetry = async (manualTelemetry: IUpdateManualTelemetry[]) => { - const { data } = await axios.patch('/api/telemetry/manual', manualTelemetry); - return data; + const updateManualTelemetry = async ( + projectId: number, + surveyId: number, + manualTelemetry: IUpdateManualTelemetry[] + ): Promise => { + await axios.put(`/api/project/${projectId}/survey/${surveyId}/deployments/telemetry/manual`, { + telemetry: manualTelemetry + }); + + return; }; /** - * Delete manual telemetry records + * Bulk delete manual telemetry records. + * + * @param {number} projectId + * @param {number} surveyId * * @param {string[]} telemetryIds Manual Telemetry ids to delete - * @return {*} + * @return {*} {Promise} */ - const deleteManualTelemetry = async (telemetryIds: string[]) => { - const { data } = await axios.post('/api/telemetry/manual/delete', telemetryIds); - return data; + const deleteManualTelemetry = async (projectId: number, surveyId: number, telemetryIds: string[]): Promise => { + await axios.post(`/api/project/${projectId}/survey/${surveyId}/deployments/telemetry/manual/delete`, { + telemetry_manual_ids: telemetryIds + }); + + return; }; /** @@ -126,6 +190,8 @@ const useTelemetryApi = (axios: AxiosInstance) => { /** * Begins processing an uploaded telemetry CSV for import * + * @TODO Update to use new API endpoints (bctw migration feature) + * * @param {number} submissionId * @return {*} */ @@ -137,24 +203,6 @@ const useTelemetryApi = (axios: AxiosInstance) => { return data; }; - /** - * Returns a list of code values for a given code header. 
- * - * @param {string} codeHeader - * @return {*} {Promise} - */ - const getCodeValues = async (codeHeader: string): Promise => { - try { - const { data } = await axios.get(`/api/telemetry/code?codeHeader=${codeHeader}`); - return data; - } catch (e) { - if (e instanceof Error) { - console.error(e.message); - } - } - return []; - }; - /** * Upload a telemetry device credential file. * @@ -205,13 +253,14 @@ const useTelemetryApi = (axios: AxiosInstance) => { return { findTelemetry, - getAllTelemetryByDeploymentIds, + getTelemetryById, + getTelemetryForSurvey, + getTelemetrySpatialForSurvey, createManualTelemetry, updateManualTelemetry, deleteManualTelemetry, uploadCsvForImport, processTelemetryCsvSubmission, - getCodeValues, uploadTelemetryDeviceCredentialFile, getTelemetryDeviceKeyFiles }; diff --git a/app/src/hooks/api/useTelemetryDeploymentApi.ts b/app/src/hooks/api/useTelemetryDeploymentApi.ts new file mode 100644 index 0000000000..47b81d62e6 --- /dev/null +++ b/app/src/hooks/api/useTelemetryDeploymentApi.ts @@ -0,0 +1,143 @@ +import { AxiosInstance } from 'axios'; +import { + CreateTelemetryDeployment, + GetSurveyDeploymentsResponse, + TelemetryDeployment, + UpdateTelemetryDeployment +} from 'interfaces/useTelemetryDeploymentApi.interface'; +import qs from 'qs'; +import { ApiPaginationRequestOptions } from 'types/misc'; + +/** + * Returns a set of supported api methods for working with telemetry deployments. + * + * @param {AxiosInstance} axios + * @return {*} object whose properties are supported api methods. + */ +export const useTelemetryDeploymentApi = (axios: AxiosInstance) => { + /** + * Create a new telemetry deployment. 
+ * + * @param {number} projectId + * @param {number} surveyId + * @param {number} critterId + * @param {CreateTelemetryDeployment} deployment + * @return {*} {Promise} + */ + const createDeployment = async ( + projectId: number, + surveyId: number, + critterId: number, + deployment: CreateTelemetryDeployment + ): Promise => { + const { data } = await axios.post( + `/api/project/${projectId}/survey/${surveyId}/critters/${critterId}/deployments`, + deployment + ); + + return data; + }; + + /** + * Update a telemetry deployment. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deploymentId + * @param {UpdateTelemetryDeployment} deployment + * @return {*} {Promise} + */ + const updateDeployment = async ( + projectId: number, + surveyId: number, + deploymentId: number, + deployment: UpdateTelemetryDeployment + ): Promise => { + const { data } = await axios.put( + `/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`, + deployment + ); + + return data; + }; + + /** + * Get a telemetry deployment by Id. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deploymentId + * @return {*} {Promise<{ deployment: TelemetryDeployment }>} + */ + const getDeploymentById = async ( + projectId: number, + surveyId: number, + deploymentId: number + ): Promise<{ deployment: TelemetryDeployment }> => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`); + + return data; + }; + + /** + * Get all telemetry deployments associated with the given survey ID. 
+ * + * @param {number} projectId + * @param {number} surveyId + * @param {ApiPaginationRequestOptions} [pagination] + * @return {*} {Promise} + */ + const getDeploymentsInSurvey = async ( + projectId: number, + surveyId: number, + pagination?: ApiPaginationRequestOptions + ): Promise => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/deployments`, { + params: { + ...pagination + }, + paramsSerializer: (params) => qs.stringify(params) + }); + + return data; + }; + + /** + * Delete a telemetry deployment. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deploymentId + * @return {*} {Promise} + */ + const deleteDeployment = async (projectId: number, surveyId: number, deploymentId: number): Promise => { + const { data } = await axios.delete(`/api/project/${projectId}/survey/${surveyId}/deployments/${deploymentId}`); + + return data; + }; + + /** + * Delete one ore more telemetry deployments. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number[]} deploymentIds + * @return {*} {Promise} + */ + const deleteDeployments = async (projectId: number, surveyId: number, deploymentIds: number[]): Promise => { + const { data } = await axios.post(`/api/project/${projectId}/survey/${surveyId}/deployments/delete`, { + deployment_ids: deploymentIds + }); + + return data; + }; + + return { + createDeployment, + updateDeployment, + getDeploymentById, + getDeploymentsInSurvey, + deleteDeployment, + deleteDeployments + }; +}; diff --git a/app/src/hooks/api/useTelemetryDeviceApi.ts b/app/src/hooks/api/useTelemetryDeviceApi.ts new file mode 100644 index 0000000000..aa6191b65b --- /dev/null +++ b/app/src/hooks/api/useTelemetryDeviceApi.ts @@ -0,0 +1,180 @@ +import { AxiosInstance, AxiosProgressEvent, CancelTokenSource } from 'axios'; +import { IUploadAttachmentResponse } from 'interfaces/useProjectApi.interface'; +import { TelemetryDeviceKeyFile } from 'interfaces/useTelemetryApi.interface'; 
+import { + CreateTelemetryDevice, + TelemetryDevice, + UpdateTelemetryDevice +} from 'interfaces/useTelemetryDeviceApi.interface'; +import { ApiPaginationRequestOptions, ApiPaginationResponseParams } from 'types/misc'; + +/** + * Returns a set of supported api methods for working with telemetry devices. + * + * @param {AxiosInstance} axios + * @return {*} object whose properties are supported api methods. + */ +export const useTelemetryDeviceApi = (axios: AxiosInstance) => { + /** + * Create a new telemetry device. + * + * @param {number} projectId + * @param {number} surveyId + * @param {CreateTelemetryDevice} device + * @return {*} {Promise} + */ + const createDevice = async (projectId: number, surveyId: number, device: CreateTelemetryDevice): Promise => { + const { data } = await axios.post(`/api/project/${projectId}/survey/${surveyId}/devices`, device); + + return data; + }; + + /** + * Update a telemetry device. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deviceId + * @param {UpdateTelemetryDevice} device + * @return {*} {Promise} + */ + const updateDevice = async ( + projectId: number, + surveyId: number, + deviceId: number, + device: UpdateTelemetryDevice + ): Promise => { + const { data } = await axios.put(`/api/project/${projectId}/survey/${surveyId}/devices/${deviceId}`, device); + + return data; + }; + + /** + * Get a telemetry device. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deviceId + * @return {*} {Promise<{ device: TelemetryDevice }>} + */ + const getDeviceById = async ( + projectId: number, + surveyId: number, + deviceId: number + ): Promise<{ device: TelemetryDevice }> => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/devices/${deviceId}`); + + return data; + }; + + /** + * Get all telemetry devices associated with the given survey ID. 
+ * + * @param {number} projectId + * @param {number} surveyId + * @param {ApiPaginationRequestOptions} [pagination] + * @return {*} {Promise<{ devices: TelemetryDevice[]; count: number; pagination: ApiPaginationResponseParams }>} + */ + const getDevicesInSurvey = async ( + projectId: number, + surveyId: number, + pagination?: ApiPaginationRequestOptions + ): Promise<{ devices: TelemetryDevice[]; count: number; pagination: ApiPaginationResponseParams }> => { + const { data } = await axios.get(`/api/project/${projectId}/survey/${surveyId}/devices`, { + params: { + ...pagination + } + }); + + return data; + }; + + /** + * Delete a telemetry device. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number} deviceId + * @return {*} {Promise} + */ + const deleteDevice = async (projectId: number, surveyId: number, deviceId: number): Promise => { + const { data } = await axios.delete(`/api/project/${projectId}/survey/${surveyId}/devices/${deviceId}`); + + return data; + }; + + /** + * Delete one or more telemetry devices. + * + * @param {number} projectId + * @param {number} surveyId + * @param {number[]} deviceIds + * @return {*} {Promise} + */ + const deleteDevices = async (projectId: number, surveyId: number, deviceIds: number[]): Promise => { + const { data } = await axios.post(`/api/project/${projectId}/survey/${surveyId}/devices/delete`, { + device_ids: deviceIds + }); + + return data; + }; + + /** + * Upload a telemetry device credential file. 
+ * + * @param {number} projectId + * @param {number} surveyId + * @param {File} file + * @param {CancelTokenSource} [cancelTokenSource] + * @param {(progressEvent: AxiosProgressEvent) => void} [onProgress] + * @return {*} {Promise} + */ + const uploadTelemetryDeviceCredentialFile = async ( + projectId: number, + surveyId: number, + file: File, + cancelTokenSource?: CancelTokenSource, + onProgress?: (progressEvent: AxiosProgressEvent) => void + ): Promise => { + const req_message = new FormData(); + + req_message.append('media', file); + + const { data } = await axios.post( + `/api/project/${projectId}/survey/${surveyId}/attachments/telemetry`, + req_message, + { + cancelToken: cancelTokenSource?.token, + onUploadProgress: onProgress + } + ); + + return data; + }; + + /** + * Get all uploaded telemetry device credential key files. + * + * @param {number} projectId + * @param {number} surveyId + * @return {*} {Promise} + */ + const getTelemetryDeviceKeyFiles = async (projectId: number, surveyId: number): Promise => { + const { data } = await axios.get<{ telemetryAttachments: TelemetryDeviceKeyFile[] }>( + `/api/project/${projectId}/survey/${surveyId}/attachments/telemetry` + ); + + return data.telemetryAttachments; + }; + + return { + createDevice, + updateDevice, + getDeviceById, + getDevicesInSurvey, + deleteDevice, + deleteDevices, + uploadTelemetryDeviceCredentialFile, + getTelemetryDeviceKeyFiles + }; +}; diff --git a/app/src/hooks/useBioHubApi.ts b/app/src/hooks/useBioHubApi.ts index 37786124f6..710e346e24 100644 --- a/app/src/hooks/useBioHubApi.ts +++ b/app/src/hooks/useBioHubApi.ts @@ -1,5 +1,7 @@ import axios from 'axios'; import useReferenceApi from 'hooks/api/useReferenceApi'; +import { useTelemetryDeploymentApi } from 'hooks/api/useTelemetryDeploymentApi'; +import { useTelemetryDeviceApi } from 'hooks/api/useTelemetryDeviceApi'; import { useConfigContext } from 'hooks/useContext'; import { useMemo } from 'react'; import useAdminApi from 
'./api/useAdminApi'; @@ -74,6 +76,10 @@ export const useBiohubApi = () => { const telemetry = useTelemetryApi(apiAxios); + const telemetryDeployment = useTelemetryDeploymentApi(apiAxios); + + const telemetryDevice = useTelemetryDeviceApi(apiAxios); + const markdown = useMarkdownApi(apiAxios); const alert = useAlertApi(apiAxios); @@ -100,6 +106,8 @@ export const useBiohubApi = () => { standards, reference, telemetry, + telemetryDeployment, + telemetryDevice, markdown, alert }), diff --git a/app/src/hooks/useContext.tsx b/app/src/hooks/useContext.tsx index 39da771e41..d371a965b9 100644 --- a/app/src/hooks/useContext.tsx +++ b/app/src/hooks/useContext.tsx @@ -8,7 +8,6 @@ import { IObservationsTableContext, ObservationsTableContext } from 'contexts/ob import { IProjectContext, ProjectContext } from 'contexts/projectContext'; import { ISurveyContext, SurveyContext } from 'contexts/surveyContext'; import { ITaxonomyContext, TaxonomyContext } from 'contexts/taxonomyContext'; -import { ITelemetryDataContext, TelemetryDataContext } from 'contexts/telemetryDataContext'; import { IAllTelemetryTableContext, TelemetryTableContext } from 'contexts/telemetryTableContext'; import { useContext } from 'react'; @@ -148,23 +147,6 @@ export const useObservationsTableContext = (): IObservationsTableContext => { return context; }; -/** - * Returns an instance of `ITelemetryDataContext` from `TelemetryDataContext`. - * - * @return {*} {ITelemetryDataContext} - */ -export const useTelemetryDataContext = (): ITelemetryDataContext => { - const context = useContext(TelemetryDataContext); - - if (!context) { - throw Error( - 'TelemetryDataContext is undefined, please verify you are calling useTelemetryDataContext() as child of an component.' - ); - } - - return context; -}; - /** * Returns an instance of `ITelemetryTableContext` from `TelemetryTableContext`. 
* diff --git a/app/src/interfaces/useCodesApi.interface.ts b/app/src/interfaces/useCodesApi.interface.ts index a053431566..730f329d4b 100644 --- a/app/src/interfaces/useCodesApi.interface.ts +++ b/app/src/interfaces/useCodesApi.interface.ts @@ -41,5 +41,7 @@ export interface IGetAllCodeSetsResponse { method_response_metrics: CodeSet<{ id: number; name: string; description: string }>; attractants: CodeSet<{ id: number; name: string; description: string }>; observation_subcount_signs: CodeSet<{ id: number; name: string; description: string }>; + telemetry_device_makes: CodeSet<{ id: number; name: string; description: string }>; + frequency_units: CodeSet<{ id: number; name: string; description: string }>; alert_types: CodeSet<{ id: number; name: string; description: string }>; } diff --git a/app/src/interfaces/useSurveyApi.interface.ts b/app/src/interfaces/useSurveyApi.interface.ts index 28c0958d07..49c98ad137 100644 --- a/app/src/interfaces/useSurveyApi.interface.ts +++ b/app/src/interfaces/useSurveyApi.interface.ts @@ -13,7 +13,7 @@ import { ISpeciesForm, ITaxonomyWithEcologicalUnits } from 'features/surveys/com import { ISurveyPartnershipsForm } from 'features/surveys/view/components/SurveyPartnershipsForm'; import { Feature } from 'geojson'; import { ITaxonomy } from 'interfaces/useTaxonomyApi.interface'; -import { IAnimalDeployment } from 'interfaces/useTelemetryApi.interface'; +import { TelemetryDeployment } from 'interfaces/useTelemetryDeploymentApi.interface'; import { ApiPaginationResponseParams, StringBoolean } from 'types/misc'; import { ICritterDetailedResponse, ICritterSimpleResponse } from './useCritterApi.interface'; @@ -461,7 +461,7 @@ export interface IDetailedCritterWithInternalId extends ICritterDetailedResponse } export interface IAnimalDeploymentWithCritter { - deployment: IAnimalDeployment; + deployment: TelemetryDeployment; critter: ICritterSimpleResponse; } diff --git a/app/src/interfaces/useTelemetryApi.interface.ts 
b/app/src/interfaces/useTelemetryApi.interface.ts index c2a4349e76..f4a1079a05 100644 --- a/app/src/interfaces/useTelemetryApi.interface.ts +++ b/app/src/interfaces/useTelemetryApi.interface.ts @@ -1,20 +1,20 @@ -import { DeploymentFormYupSchema } from 'features/surveys/telemetry/deployments/components/form/DeploymentForm'; -import { FeatureCollection } from 'geojson'; +import { DeploymentFormYupSchema } from 'features/surveys/telemetry/manage/deployments/form/DeploymentForm'; +import { FeatureCollection, Point } from 'geojson'; import { ApiPaginationResponseParams } from 'types/misc'; import yup from 'utils/YupSchema'; +// TODO Nick - Replace with new schema export interface IFindTelementryObj { telemetry_id: string; + deployment_id: number; + critter_id: number; + vendor: string; + serial: string; acquisition_date: string | null; latitude: number | null; longitude: number | null; - telemetry_type: string; - device_id: number; - bctw_deployment_id: string; - critter_id: number; - deployment_id: number; - critterbase_critter_id: string; - animal_id: string | null; + elevation: number | null; + temperature: number | null; } /** @@ -28,132 +28,47 @@ export interface IFindTelemetryResponse { pagination: ApiPaginationResponseParams; } -export interface ICritterDeploymentResponse { - critter_id: string; - device_id: number; - deployment_id: string; - survey_critter_id: string; - alias: string; - attachment_start: string; - attachment_end?: string; - taxon: string; -} - export interface IUpdateManualTelemetry { telemetry_manual_id: string; + deployment_id: number; latitude: number; longitude: number; acquisition_date: string; + transmission_date: string | null; } export interface ICreateManualTelemetry { - deployment_id: string; + deployment_id: number; latitude: number; longitude: number; acquisition_date: string; + transmission_date: string | null; } export interface IManualTelemetry extends ICreateManualTelemetry { telemetry_manual_id: string; } +/** + * Normalized 
telemetry record for all vendor types. + * + * @export + * @interface IAllTelemetry + */ export interface IAllTelemetry { - id: string; - deployment_id: string; - telemetry_manual_id: string; - telemetry_id: number | null; - device_id: string; - latitude: number; - longitude: number; + telemetry_id: string; + deployment_id: number; + critter_id: number; + vendor: string; + serial: string; acquisition_date: string; - telemetry_type: string; + latitude: number | null; + longitude: number | null; + elevation: number | null; + temperature: number | null; } -export type IAnimalDeployment = { - // BCTW properties - - /** - * The ID of a BCTW collar animal assignment (aka: deployment) record. - */ - assignment_id: string; - /** - * The ID of a BCTW collar record. - */ - collar_id: string; - /** - * The ID of a BCTW critter record. Should match a critter_id in Critterbase. - */ - critter_id: number; - /** - * The ID of a BCTW device. - */ - device_id: number; - /** - * The time the deployment started. - */ - attachment_start_date: string | null; - /** - * The time the deployment started. - */ - attachment_start_time: string | null; - /** - * The time the deployment ended. - */ - attachment_end_date: string | null; - /** - * The time the deployment ended. - */ - attachment_end_time: string | null; - /** - * The ID of a BCTW deployment record. - */ - bctw_deployment_id: string; - /** - * The ID of a BCTW device make record. - */ - device_make: number; - /** - * The model of the device. - */ - device_model: string | null; - /** - * The frequency of the device. - */ - frequency: number | null; - /** - * The ID of a BCTW frequency unit record. - */ - frequency_unit: number | null; - - // SIMS properties - - /** - * SIMS deployment record ID - */ - deployment_id: number; - /** - * Critterbase critter ID - */ - critterbase_critter_id: string; - /** - * Critterbase capture ID for the start of the deployment. 
- */ - critterbase_start_capture_id: string; - /** - * Critterbase capture ID for the end of the deployment. - */ - critterbase_end_capture_id: string | null; - /** - * Critterbase mortality ID for the end of the deployment. - */ - critterbase_end_mortality_id: string | null; -}; - export type ICreateAnimalDeployment = yup.InferType; -export interface ICreateAnimalDeploymentPostData extends Omit { - device_id: number; -} - export type IAllTelemetryPointCollection = { points: FeatureCollection; tracks: FeatureCollection }; export interface ITelemetry { @@ -191,15 +106,6 @@ export interface ITelemetry { telemetry_type: string; } -export interface ICodeResponse { - code_header_title: string; - code_header_name: string; - id: number; - code: string; - description: string; - long_description: string; -} - export type TelemetryDeviceKeyFile = { survey_telemetry_credential_attachment_id: number; uuid: string; @@ -212,3 +118,20 @@ export type TelemetryDeviceKeyFile = { description: string | null; key: string; }; + +export type TelemetrySpatial = { + /** + * The ID of the telemetry record (uuid). + */ + telemetry_id: string; + /** + * The geometry of the telemetry record. + */ + geometry: Point | null; +}; + +export type GetSurveyTelemetryResponse = { + telemetry: IAllTelemetry[]; + count: number; + pagination: ApiPaginationResponseParams; +}; diff --git a/app/src/interfaces/useTelemetryDeploymentApi.interface.ts b/app/src/interfaces/useTelemetryDeploymentApi.interface.ts new file mode 100644 index 0000000000..3fc243aff5 --- /dev/null +++ b/app/src/interfaces/useTelemetryDeploymentApi.interface.ts @@ -0,0 +1,69 @@ +import { ApiPaginationResponseParams } from 'types/misc'; + +/** + * Create telemetry deployment record. 
+ */ +export type CreateTelemetryDeployment = { + device_id: number; + frequency: number | null; + frequency_unit_id: number | null; + attachment_start_date: string; + attachment_start_time: string | null; + attachment_end_date: string | null; + attachment_end_time: string | null; + critterbase_start_capture_id: string; + critterbase_end_capture_id: string | null; + critterbase_end_mortality_id: string | null; +}; + +/** + * Update telemetry deployment record. + */ +export type UpdateTelemetryDeployment = { + critter_id: number; + device_id: number; + frequency: number | null; + frequency_unit_id: number | null; + attachment_start_date: string; + attachment_start_time: string | null; + attachment_end_date: string | null; + attachment_end_time: string | null; + critterbase_start_capture_id: string; + critterbase_end_capture_id: string | null; + critterbase_end_mortality_id: string | null; +}; + +/** + * Telemetry deployment record. + */ +export type TelemetryDeployment = { + // deployment data + deployment_id: number; + survey_id: number; + critter_id: number; + device_id: number; + device_key: string; + frequency: number | null; + frequency_unit_id: number | null; + attachment_start_date: string; + attachment_start_time: string | null; + attachment_start_timestamp: string; + attachment_end_date: string | null; + attachment_end_time: string | null; + attachment_end_timestamp: string | null; + critterbase_start_capture_id: string; + critterbase_end_capture_id: string | null; + critterbase_end_mortality_id: string | null; + // device data + serial: string; + device_make_id: number; + model: string | null; + // critter data + critterbase_critter_id: string; +}; + +export type GetSurveyDeploymentsResponse = { + deployments: TelemetryDeployment[]; + count: number; + pagination: ApiPaginationResponseParams; +}; diff --git a/app/src/interfaces/useTelemetryDeviceApi.interface.ts b/app/src/interfaces/useTelemetryDeviceApi.interface.ts new file mode 100644 index 
0000000000..d137a60839 --- /dev/null +++ b/app/src/interfaces/useTelemetryDeviceApi.interface.ts @@ -0,0 +1,31 @@ +/** + * Create telemetry device record. + */ +export type CreateTelemetryDevice = { + serial: string; + device_make_id: number; + model: string | null; + comment: string | null; +}; + +/** + * Update telemetry device record. + */ +export type UpdateTelemetryDevice = { + serial: string; + device_make_id: number; + model: string | null; + comment: string | null; +}; + +/** + * Telemetry device record. + */ +export type TelemetryDevice = { + device_id: number; + survey_id: number; + serial: string; + device_make_id: number; + model: string | null; + comment: string | null; +}; diff --git a/app/src/test-helpers/code-helpers.ts b/app/src/test-helpers/code-helpers.ts index 368f48a5da..40d22a336a 100644 --- a/app/src/test-helpers/code-helpers.ts +++ b/app/src/test-helpers/code-helpers.ts @@ -67,6 +67,14 @@ export const codes: IGetAllCodeSetsResponse = { { id: 1, name: 'Scat', description: 'Scat left by the species.' }, { id: 2, name: 'Direct sighting', description: 'A direct sighting of the species.' } ], + telemetry_device_makes: [ + { id: 1, name: 'Vectronic', description: 'Vectronic device make.' }, + { id: 2, name: 'Lotek', description: 'Lotek device make.' } + ], + frequency_units: [ + { id: 1, name: 'Hz', description: 'Hertz' }, + { id: 2, name: 'Mhz', description: 'Megahertz' } + ], alert_types: [ { id: 1, name: 'Survey', description: 'Alert about surveys.' }, { id: 2, name: 'General', description: 'General alert.' } diff --git a/app/src/utils/string-utils.ts b/app/src/utils/string-utils.ts new file mode 100644 index 0000000000..98ce83865d --- /dev/null +++ b/app/src/utils/string-utils.ts @@ -0,0 +1,16 @@ +/** + * Given a string: + * - If the string is empty, null, or undefined, then null will be returned. + * - Otherwise, the string will be converted to a number. 
+ * + * @export + * @param {(string | null | undefined)} value + * @return {*} {(number | null)} + */ +export function numberOrNull(value: string | null | undefined): number | null { + if (value === null || value === undefined || value === '') { + return null; + } + + return Number(value); +} diff --git a/compose.yml b/compose.yml index df5d29b2a9..5bb7f1af74 100644 --- a/compose.yml +++ b/compose.yml @@ -103,11 +103,15 @@ services: - BACKBONE_ARTIFACT_INTAKE_PATH=${BACKBONE_ARTIFACT_INTAKE_PATH} - BIOHUB_TAXON_PATH=${BIOHUB_TAXON_PATH} - BIOHUB_TAXON_TSN_PATH=${BIOHUB_TAXON_TSN_PATH} - # BCTW / Critterbase - - BCTW_API_HOST=${BCTW_API_HOST} + # Critterbase - CB_API_HOST=${CB_API_HOST} # Feature Flags - FEATURE_FLAGS=${FEATURE_FLAGS} + # Telemetry TODO: Add to CI/CD pipelines + - LOTEK_API_HOST=${LOTEK_API_HOST} + - LOTEK_ACCOUNT_USERNAME=${LOTEK_ACCOUNT_USERNAME} + - LOTEK_ACCOUNT_PASSWORD=${LOTEK_ACCOUNT_PASSWORD} + - VECTRONIC_API_HOST=${VECTRONIC_API_HOST} volumes: - ./api:/opt/app-root/src - /opt/app-root/src/node_modules # prevents local node_modules overriding container node_modules diff --git a/database/.docker/db/create_postgis.sql b/database/.docker/db/create_postgis.sql index d2248dbfd0..5291294a72 100644 --- a/database/.docker/db/create_postgis.sql +++ b/database/.docker/db/create_postgis.sql @@ -11,3 +11,5 @@ CREATE EXTENSION IF NOT EXISTS pgRouting CASCADE; CREATE EXTENSION IF NOT EXISTS fuzzystrmatch CASCADE; CREATE EXTENSION IF NOT EXISTS pgcrypto CASCADE; +-- Indexes +CREATE EXTENSION IF NOT EXISTS btree_gist; \ No newline at end of file diff --git a/database/src/migrations/20241006000000_rename_old_deployment.ts b/database/src/migrations/20241006000000_rename_old_deployment.ts new file mode 100644 index 0000000000..a8209d1401 --- /dev/null +++ b/database/src/migrations/20241006000000_rename_old_deployment.ts @@ -0,0 +1,44 @@ +import { Knex } from 'knex'; + +/** + * Renames the existing deployment table to deployment_old to preserve the data while 
we migrate it to the new tables. + * + * Drops the old deployment table views, triggers, and constraints so the names can be re-usd by the new replacement + * tables. + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(`--sql + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub_dapi_v1; + + -- Drop old deployment table view + DROP VIEW IF EXISTS deployment; + + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub; + + -- Drop old deployment table journal/audit triggers + -- Note: the triggers were incorrectly named when originally created + DROP TRIGGER IF EXISTS audit_critter ON deployment; + DROP TRIGGER IF EXISTS journal_critter ON deployment; + + -- Drop old deployment table indexes + DROP INDEX IF EXISTS deployment_uk1; + DROP INDEX IF EXISTS deployment_idx1; + + -- Drop old deployment table constraints + ALTER TABLE deployment DROP CONSTRAINT IF EXISTS deployment_fk1; + ALTER TABLE deployment DROP CONSTRAINT IF EXISTS deployment_pk; + + -- Rename the existing deployment table to deployment_old to preserve the data while we migrate it to the new tables + ALTER TABLE deployment RENAME TO deployment_old; + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(``); +} diff --git a/database/src/migrations/20241007000000_bctw_telemetry_tables.ts b/database/src/migrations/20241007000000_bctw_telemetry_tables.ts new file mode 100644 index 0000000000..b6c19321ec --- /dev/null +++ b/database/src/migrations/20241007000000_bctw_telemetry_tables.ts @@ -0,0 +1,461 @@ +import { Knex } from 'knex'; +/** + * TABLE: telemetry_ats + * Raw row identifier: N/A + * Collar serial: `collarserialnumber` + * Device make: `ats` - Must match a value in the device_make lookup table + * + * Notes: + * 1. 
Generating a UUID column for the primary key to prevent collisions with other telemetry sources. + * 2. Generating the `device_key` as a combination of the device make and collar serial number. + * 3. The `collarserialnumber` value was previously Nullable, but is now required to generate the device_id. + * 4. Dropped deprecated `timeid` column + * + * TABLE: telemetry_vectronic + * Raw row identifier: `idposition` + * Collar serial: `idcollar` + * Device make: `vectronic` - Must match a value in the device_make lookup table + * + * Notes: + * 1. Generating a UUID column for the primary key to prevent collisions with other telemetry sources. + * 2. Generating the `device_key` as a combination of the device make and collar serial number. + * + * TABLE: telemetry_lotek + * Raw row identifier: N/A + * Collar serial: `deviceid` - Must match a value in the device_make lookup table + * + * Notes: + * 1. Generating a UUID column for the primary key to prevent collisions with other telemetry sources. + * 2. Generating the `device_key` as a combination of the device make and collar serial number. + * 3. Dropping the previously used `timeid` column as the primary key and replacing it with a UUID. + * 4. The `deviceid` value was previously Nullable, but is now required to generate the device_id. + * There is data in BCTW currently where that value is NULL, but since we will fetch all the data + * fresh this shouldn't be a problem. 
+ * + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(`--sql + ---------------------------------------------------------------------------------------- + -- Add btree gist extension + ---------------------------------------------------------------------------------------- + CREATE EXTENSION IF NOT EXISTS btree_gist; + + ---------------------------------------------------------------------------------------- + -- Create telemetry_ats table + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub; + + CREATE TABLE telemetry_ats ( + telemetry_ats_id uuid DEFAULT public.gen_random_uuid() NOT NULL, + device_key varchar GENERATED ALWAYS AS ('ats:' || collarserialnumber::text) STORED NOT NULL, + + collarserialnumber int4 NOT NULL, + "date" timestamptz NULL, + numberfixes int4 NULL, + battvoltage float8 NULL, + mortality bool NULL, + breakoff bool NULL, + gpsontime int4 NULL, + satontime int4 NULL, + saterrors int4 NULL, + gmtoffset int4 NULL, + lowbatt bool NULL, + "event" varchar(100) NULL, + latitude float8 NULL, + longitude float8 NULL, + cepradius_km int4 NULL, + temperature varchar NULL, + hdop varchar NULL, + numsats varchar NULL, + fixtime varchar NULL, + activity varchar NULL, + geography public.geography(point, 4326) GENERATED ALWAYS AS (CASE WHEN latitude IS NOT NULL AND longitude IS NOT NULL THEN public.ST_SetSRID(public.ST_MakePoint(longitude, latitude), 4326) ELSE NULL END) STORED, + + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + + CONSTRAINT telemetry_ats_pk PRIMARY KEY (telemetry_ats_id) + ); + + ---------------------------------------------------------------------------------------- + -- Create Indexes + 
---------------------------------------------------------------------------------------- + CREATE INDEX telemetry_ats_idx1 ON telemetry_ats(device_key); + CREATE UNIQUE INDEX telemetry_ats_idx2 ON telemetry_ats(date, collarserialnumber); + CREATE INDEX telemetry_ats_idx3 ON telemetry_ats(date); + CREATE INDEX telemetry_ats_idx4 ON telemetry_ats USING gist(geography); + + COMMENT ON TABLE telemetry_ats IS 'Raw telemetry data from the ATS API'; + COMMENT ON COLUMN telemetry_ats.telemetry_ats_id IS 'Primary key for telemetry_ats table. This data should only be updated by the Cronjob.'; + COMMENT ON COLUMN telemetry_ats.device_key IS 'A generated key for the device make and serial. This is a combination of the device make and the serial number. ie: ats:12345'; + COMMENT ON COLUMN telemetry_ats.collarserialnumber IS 'The serial number printed on the device. Not used as a key.'; + COMMENT ON COLUMN telemetry_ats."date" IS 'The timestamp at which this row was recorded.'; + COMMENT ON COLUMN telemetry_ats.numberfixes IS 'Unknown description. Assumption: Number of GPS fixes obtained to generate this telemetry point.'; + COMMENT ON COLUMN telemetry_ats.battvoltage IS 'Voltage running through main battery of the device at time this row was recorded.'; + COMMENT ON COLUMN telemetry_ats.mortality IS 'Indicates whether the device is reporting that the animal has died or not.'; + COMMENT ON COLUMN telemetry_ats.breakoff IS 'Indicates whether the device is reporting that it has been detached from the animal.'; + COMMENT ON COLUMN telemetry_ats.gpsontime IS 'A number from 0 to 300 representing the length of time in seconds the GPS receiver was on to determine the last location. A zero represents the data status at the end of a fifteen-minute almanac reading.'; + COMMENT ON COLUMN telemetry_ats.satontime IS 'Unknown description. 
Assumption: the amount of time the collars satellite transmitter was powered on to exchange data.'; + COMMENT ON COLUMN telemetry_ats.saterrors IS 'Unknown description. Assumption: Number of times the device was unable to establish a fix with the satellite.'; + COMMENT ON COLUMN telemetry_ats.gmtoffset IS 'Unknown description. Assumption: Greenwhich Mean Time offset for the date field.'; + COMMENT ON COLUMN telemetry_ats.lowbatt IS 'Indicates whether the device is reporting low battery.'; + COMMENT ON COLUMN telemetry_ats."event" IS 'Unknown description. Assumption: Additional event information that cannot be indicated by the other boolean fields. Note that this just defaults to None.'; + COMMENT ON COLUMN telemetry_ats.latitude IS 'North-South position along surface of the Earth. WGS 84.'; + COMMENT ON COLUMN telemetry_ats.longitude IS 'East-West position along the surface of the Earth. WGS 84.'; + COMMENT ON COLUMN telemetry_ats.cepradius_km IS 'Perhaps circular error probable radius, which would indicate the mean radius about the recorded point that the data could be off by.'; + COMMENT ON COLUMN telemetry_ats.temperature IS 'Temperature in Celcius'; + COMMENT ON COLUMN telemetry_ats.hdop IS 'Horizontal dilution of precision, another indication of error propagation in satellite tracking.'; + COMMENT ON COLUMN telemetry_ats.numsats IS 'Number of satellites used in achieving GPS fix'; + COMMENT ON COLUMN telemetry_ats.fixtime IS 'Number of seconds needed to achieve GPS fix'; + COMMENT ON COLUMN telemetry_ats.activity IS 'Activity value represents change in the accelerometer value internal to the collar between GPS fixes. 
Exact numeric meaning varies between models.'; + COMMENT ON COLUMN telemetry_ats.geography IS 'The latitude and longitude as a PostGIS geography point.'; + COMMENT ON COLUMN telemetry_ats.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_ats.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_ats.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_ats.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_ats.revision_count IS 'Revision count used for concurrency control.'; + + ---------------------------------------------------------------------------------------- + -- Add triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_telemetry_ats BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_ats FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_ats AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_ats FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + + ---------------------------------------------------------------------------------------- + -- Create telemetry_vectronic Table + ---------------------------------------------------------------------------------------- + CREATE TABLE telemetry_vectronic ( + telemetry_vectronic_id UUID DEFAULT public.gen_random_uuid() NOT NULL, + device_key VARCHAR GENERATED ALWAYS AS ('vectronic:' || idcollar::text) STORED NOT NULL, + + idposition INT4 NOT NULL, + idcollar INT4 NOT NULL, + acquisitiontime TIMESTAMPTZ NULL, + scts TIMESTAMPTZ NULL, + origincode TEXT NULL, + ecefx FLOAT8 NULL, + ecefy FLOAT8 NULL, + ecefz FLOAT8 NULL, + latitude FLOAT8 NULL, + longitude FLOAT8 NULL, + height FLOAT8 NULL, + dop FLOAT8 NULL, + idfixtype INT4 NULL, + positionerror FLOAT8 NULL, + satcount INT4 NULL, + 
ch01satid INT4 NULL, + ch01satcnr INT4 NULL, + ch02satid INT4 NULL, + ch02satcnr INT4 NULL, + ch03satid INT4 NULL, + ch03satcnr INT4 NULL, + ch04satid INT4 NULL, + ch04satcnr INT4 NULL, + ch05satid INT4 NULL, + ch05satcnr INT4 NULL, + ch06satid INT4 NULL, + ch06satcnr INT4 NULL, + ch07satid INT4 NULL, + ch07satcnr INT4 NULL, + ch08satid INT4 NULL, + ch08satcnr INT4 NULL, + ch09satid INT4 NULL, + ch09satcnr INT4 NULL, + ch10satid INT4 NULL, + ch10satcnr INT4 NULL, + ch11satid INT4 NULL, + ch11satcnr INT4 NULL, + ch12satid INT4 NULL, + ch12satcnr INT4 NULL, + idmortalitystatus INT4 NULL, + activity INT4 NULL, + mainvoltage FLOAT8 NULL, + backupvoltage FLOAT8 NULL, + temperature FLOAT8 NULL, + transformedx FLOAT8 NULL, + transformedy FLOAT8 NULL, + geography public.geography(point, 4326) GENERATED ALWAYS AS (CASE WHEN latitude IS NOT NULL AND longitude IS NOT NULL THEN public.ST_SetSRID(public.ST_MakePoint(longitude, latitude), 4326) ELSE NULL END) STORED, + + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + + CONSTRAINT telemetry_vectronic_pk PRIMARY KEY (telemetry_vectronic_id), + CONSTRAINT telemetry_vectronic_idposition_un UNIQUE (idposition) + ); + + ---------------------------------------------------------------------------------------- + -- Create Indexes + ---------------------------------------------------------------------------------------- + CREATE INDEX telemetry_vectronic_idx1 ON telemetry_vectronic(device_key); + CREATE INDEX telemetry_vectronic_idx2 ON telemetry_vectronic USING gist (geography); + CREATE INDEX telemetry_vectronic_idx3 ON telemetry_vectronic(acquisitiontime); + + COMMENT ON TABLE telemetry_vectronic IS 'The raw telemetry data from Vectronics API. 
This data should only be updated by the Cronjob.'; + COMMENT ON COLUMN telemetry_vectronic.telemetry_vectronic_id IS 'Primary key for telemetry_vectronic table'; + COMMENT ON COLUMN telemetry_vectronic.device_key IS 'A generated key for the device make and serial number ie: vectronic:12345'; + COMMENT ON COLUMN telemetry_vectronic.idposition IS 'acts as the primary key of the table, this is a vectronic database identifier'; + COMMENT ON COLUMN telemetry_vectronic.idcollar IS 'Vectronic device ID'; + COMMENT ON COLUMN telemetry_vectronic.acquisitiontime IS 'Timestamp from the device marking when the record was recorded'; + COMMENT ON COLUMN telemetry_vectronic.scts IS 'SCTS - Service Center Timestamp. Timestamp when the record was received by the service center.'; + COMMENT ON COLUMN telemetry_vectronic.origincode IS 'Code to identify the origin of this record. I - Iridium, G - Globalstar, S - GSM SMS, F- GSM FTP, C - Collar, A - Argos, T - Terminal, 0 - Unknown'; + COMMENT ON COLUMN telemetry_vectronic.ecefx IS 'Earth Centered Earth Fixed X direction is the cartesian coordinate where the origin is the center of the earth. The x direction is the intersection of the prime meridian with the equator.'; + COMMENT ON COLUMN telemetry_vectronic.ecefy IS 'Earth Centered Earth Fixed Y direction is the cartesian coordinate where the origin is the center of the earth.'; + COMMENT ON COLUMN telemetry_vectronic.ecefz IS 'Earth Centered Earth Fixed Z direction is the cartesian coordinate where the origin is the center of the earth. The z direction is the intersection from the center of the earth to the north pole.'; + COMMENT ON COLUMN telemetry_vectronic.latitude IS 'North-South position along surface of the Earth. WGS 84.'; + COMMENT ON COLUMN telemetry_vectronic.longitude IS 'East-West position along the surface of the Earth. 
WGS 84.'; + COMMENT ON COLUMN telemetry_vectronic.height IS 'WGS 84 Height.'; + COMMENT ON COLUMN telemetry_vectronic.dop IS 'Dilution of precision.'; + COMMENT ON COLUMN telemetry_vectronic.idfixtype IS 'Code value for the GPS fixtype. 0 - No Fix, 10 - GPS-1 Sat, 11 - GPS-2 Sat, 12 - GPS-2D, 13 - GPS-3D, 14 - val. GPS-3D, 1 - Argos-Z, 2 -Argos-B, 3 - Argos-A, 4 - Argos-0, 5 - Argos-1, 6 - Argos-2, 7 - Argos-3'; + COMMENT ON COLUMN telemetry_vectronic.positionerror IS 'No description provided by vendor.'; + COMMENT ON COLUMN telemetry_vectronic.satcount IS 'Amount of visible satellites.'; + COMMENT ON COLUMN telemetry_vectronic.ch01satid IS 'Satellite ID of Channel 1'; + COMMENT ON COLUMN telemetry_vectronic.ch01satcnr IS 'Satellite CNR of Channel 1'; + COMMENT ON COLUMN telemetry_vectronic.ch02satid IS 'Satellite ID of Channel 2'; + COMMENT ON COLUMN telemetry_vectronic.ch02satcnr IS 'Satellite CNR of Channel 2'; + COMMENT ON COLUMN telemetry_vectronic.ch03satid IS 'Satellite ID of Channel 3'; + COMMENT ON COLUMN telemetry_vectronic.ch03satcnr IS 'Satellite CNR of Channel 3'; + COMMENT ON COLUMN telemetry_vectronic.ch04satid IS 'Satellite ID of Channel 4'; + COMMENT ON COLUMN telemetry_vectronic.ch04satcnr IS 'Satellite CNR of Channel 4'; + COMMENT ON COLUMN telemetry_vectronic.ch05satid IS 'Satellite ID of Channel 5'; + COMMENT ON COLUMN telemetry_vectronic.ch05satcnr IS 'Satellite CNR of Channel 5'; + COMMENT ON COLUMN telemetry_vectronic.ch06satid IS 'Satellite ID of Channel 6'; + COMMENT ON COLUMN telemetry_vectronic.ch06satcnr IS 'Satellite CNR of Channel 6'; + COMMENT ON COLUMN telemetry_vectronic.ch07satid IS 'Satellite ID of Channel 7'; + COMMENT ON COLUMN telemetry_vectronic.ch07satcnr IS 'Satellite CNR of Channel 7'; + COMMENT ON COLUMN telemetry_vectronic.ch08satid IS 'Satellite ID of Channel 8'; + COMMENT ON COLUMN telemetry_vectronic.ch08satcnr IS 'Satellite CNR of Channel 8'; + COMMENT ON COLUMN telemetry_vectronic.ch09satid IS 'Satellite ID of 
Channel 9'; + COMMENT ON COLUMN telemetry_vectronic.ch09satcnr IS 'Satellite CNR of Channel 9'; + COMMENT ON COLUMN telemetry_vectronic.ch10satid IS 'Satellite ID of Channel 10'; + COMMENT ON COLUMN telemetry_vectronic.ch10satcnr IS 'Satellite CNR of Channel 10'; + COMMENT ON COLUMN telemetry_vectronic.ch11satid IS 'Satellite ID of Channel 11'; + COMMENT ON COLUMN telemetry_vectronic.ch11satcnr IS 'Satellite CNR of Channel 11'; + COMMENT ON COLUMN telemetry_vectronic.ch12satid IS 'Satellite ID of Channel 12'; + COMMENT ON COLUMN telemetry_vectronic.ch12satcnr IS 'Satellite CNR of Channel 12'; + COMMENT ON COLUMN telemetry_vectronic.idmortalitystatus IS 'Code value of mortality status.'; + COMMENT ON COLUMN telemetry_vectronic.activity IS 'No description provided by vendor.'; + COMMENT ON COLUMN telemetry_vectronic.mainvoltage IS 'Voltage indicator of main battery.'; + COMMENT ON COLUMN telemetry_vectronic.backupvoltage IS 'Voltage indicator of backup / beacon battery.'; + COMMENT ON COLUMN telemetry_vectronic.temperature IS 'Devices temperature reading in Celsius.'; + COMMENT ON COLUMN telemetry_vectronic.transformedx IS 'No description provided by vendor.'; + COMMENT ON COLUMN telemetry_vectronic.transformedy IS 'No description provided by vendor.'; + COMMENT ON COLUMN telemetry_vectronic.geography IS 'The latitude and longitude as a PostGIS geography point.'; + COMMENT ON COLUMN telemetry_vectronic.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_vectronic.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_vectronic.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_vectronic.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_vectronic.revision_count IS 'Revision count used for concurrency control.'; + + 
---------------------------------------------------------------------------------------- + -- Add triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_telemetry_vectronic BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_vectronic FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_vectronic AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_vectronic FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + + ---------------------------------------------------------------------------------------- + -- Create telemetry_lotek Table + ---------------------------------------------------------------------------------------- + CREATE TABLE telemetry_lotek ( + telemetry_lotek_id UUID DEFAULT public.gen_random_uuid() NOT NULL, + device_key VARCHAR GENERATED ALWAYS AS ('lotek:' || deviceid::text) STORED NOT NULL, + + channelstatus TEXT NULL, + uploadtimestamp TIMESTAMPTZ NULL, + latitude FLOAT8 NULL, + longitude FLOAT8 NULL, + altitude FLOAT8 NULL, + ecefx FLOAT8 NULL, + ecefy FLOAT8 NULL, + ecefz FLOAT8 NULL, + rxstatus INT4 NULL, + pdop FLOAT8 NULL, + mainv FLOAT8 NULL, + bkupv FLOAT8 NULL, + temperature FLOAT8 NULL, + fixduration INT4 NULL, + bhastempvoltage BOOL NULL, + devname TEXT NULL, + deltatime FLOAT8 NULL, + fixtype TEXT NULL, + cepradius FLOAT8 NULL, + crc FLOAT8 NULL, + deviceid INT4 NULL, + recdatetime TIMESTAMPTZ NULL, + geography public.geography(point, 4326) GENERATED ALWAYS AS (CASE WHEN latitude IS NOT NULL AND longitude IS NOT NULL THEN public.ST_SetSRID(public.ST_MakePoint(longitude, latitude), 4326) ELSE NULL END) STORED, + + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + + CONSTRAINT telemetry_lotek_pk PRIMARY KEY (telemetry_lotek_id) + ); + + 
---------------------------------------------------------------------------------------- + -- Create Indexes + ---------------------------------------------------------------------------------------- + CREATE INDEX telemetry_lotek_idx1 ON telemetry_lotek(device_key); + CREATE INDEX telemetry_lotek_idx2 ON telemetry_lotek USING gist (geography); + CREATE UNIQUE INDEX telemetry_lotek_idx3 ON telemetry_lotek(recdatetime, deviceid); + + COMMENT ON TABLE telemetry_lotek IS 'The raw telemetry data from Lotek. This data should only be updated by the Cronjob.'; + COMMENT ON COLUMN telemetry_lotek.telemetry_lotek_id IS 'Primary key for telemetry_lotek table'; + COMMENT ON COLUMN telemetry_lotek.device_key IS 'A generated key for the device make and serial number ie: lotek:12345'; + COMMENT ON COLUMN telemetry_lotek.channelstatus IS 'Unknown description'; + COMMENT ON COLUMN telemetry_lotek.uploadtimestamp IS 'Datetime of Iridium Upload in GMT'; + COMMENT ON COLUMN telemetry_lotek.latitude IS 'North-south position on the surface of the earth that this device transmitted from'; + COMMENT ON COLUMN telemetry_lotek.longitude IS 'East-west position on the surface of the earth that this device transmitted from'; + COMMENT ON COLUMN telemetry_lotek.altitude IS 'Vertical height in meters, calculated as Height Above Ellipsoid (HAE)'; + COMMENT ON COLUMN telemetry_lotek.ecefx IS 'Activity data for X axis; only applicable to certain collar models'; + COMMENT ON COLUMN telemetry_lotek.ecefy IS 'Activity data for Y axis; only applicable to certain collar models'; + COMMENT ON COLUMN telemetry_lotek.ecefz IS 'Activity data for Z axis; only applicable to certain collar models'; + COMMENT ON COLUMN telemetry_lotek.rxstatus IS 'Unknown description'; + COMMENT ON COLUMN telemetry_lotek.pdop IS 'Positional Dilution of Precision; see https://gisgeography.com/gps-accuracyhdop-pdop-gdop-multipath/'; + COMMENT ON COLUMN telemetry_lotek.mainv IS 'Voltage of main battery; only present with certain 
collar models'; + COMMENT ON COLUMN telemetry_lotek.bkupv IS 'Voltage of backup battery; only present with certain collar models'; + COMMENT ON COLUMN telemetry_lotek.temperature IS 'Temperature in Celcius'; + COMMENT ON COLUMN telemetry_lotek.fixduration IS 'Time taken for GPS fix attempt; only present with certain collar models'; + COMMENT ON COLUMN telemetry_lotek.bhastempvoltage IS 'Does the collar record both temperature and voltage? Only present with certain collar models'; + COMMENT ON COLUMN telemetry_lotek.devname IS 'User-assigned name for the collar; this setting is configured on Web Servic'; + COMMENT ON COLUMN telemetry_lotek.deltatime IS 'Applies to Swift Fix collars only; the difference between the satellite time and the time of the clock on-board the collar'; + COMMENT ON COLUMN telemetry_lotek.fixtype IS 'Numeric indicator to differentiate between IridiumTrack, Litetrack and Swift Fix collars'; + COMMENT ON COLUMN telemetry_lotek.cepradius IS 'Applies to Swift Fix collars only; Circular Error Probable for location data'; + COMMENT ON COLUMN telemetry_lotek.crc IS 'Applies to Swift Fix collars only; pertains to the handling of location data'; + COMMENT ON COLUMN telemetry_lotek.deviceid IS 'the Lotek device ID'; + COMMENT ON COLUMN telemetry_lotek.recdatetime IS 'timestamp the telemetry was recorded'; + COMMENT ON COLUMN telemetry_lotek.geography IS 'The latitude and longitude as a PostGIS geography point.'; + COMMENT ON COLUMN telemetry_lotek.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_lotek.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_lotek.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_lotek.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_lotek.revision_count IS 'Revision count used for concurrency 
control.'; + + ---------------------------------------------------------------------------------------- + -- Add triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_telemetry_lotek BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_lotek FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_lotek AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_lotek FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + + ---------------------------------------------------------------------------------------- + -- Create device make table + ---------------------------------------------------------------------------------------- + CREATE TABLE device_make ( + device_make_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + name varchar(32) NOT NULL, + description varchar(128), + notes varchar(128), + record_effective_date timestamptz(6), -- Intentionally nullable + record_end_date timestamptz(6), + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + + CONSTRAINT device_make_id_pk PRIMARY KEY (device_make_id) + ); + + COMMENT ON TABLE device_make IS 'This table is intended to store options that users can select for their device make.'; + COMMENT ON COLUMN device_make.device_make_id IS 'Composite primary key (id) for device make.'; + COMMENT ON COLUMN device_make.name IS 'Composite primary key (name) of the device make option.'; + COMMENT ON COLUMN device_make.description IS 'Description of the device make option.'; + COMMENT ON COLUMN device_make.notes IS 'Additional internal related notes of the record.'; + COMMENT ON COLUMN device_make.record_effective_date IS 'Start date of the device make option.'; + COMMENT ON COLUMN device_make.record_end_date IS 'End date of the device make option.'; + COMMENT ON COLUMN 
device_make.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN device_make.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN device_make.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN device_make.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN device_make.revision_count IS 'Revision count used for concurrency control.'; + + ---------------------------------------------------------------------------------------- + -- Add triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_device_make BEFORE INSERT OR UPDATE OR DELETE ON biohub.device_make FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_device_make AFTER INSERT OR UPDATE OR DELETE ON biohub.device_make FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + ---------------------------------------------------------------------------------------- + -- Add initial values to device make table + ---------------------------------------------------------------------------------------- + INSERT INTO device_make (name, description, notes, record_effective_date) VALUES + ('vectronic', 'Vectronic Aerospace Telemetry', 'This label must never change, raw telemetry table (telemetry_vectronic) references this value to generate device_key.', NOW()), + ('lotek', 'Lotek Telemetry', 'This label must never change, raw telemetry table (telemetry_lotek) references this value to generate device_key.', NOW()), + ('ats', 'Advanced Telemetry Systems', 'This label must never change, raw telemetry table (telemetry_ats) references this value to generate device_key.', NOW()), + ('followit', 'Followit Telemetry', 'This device make is currently unsupported. 
All devices which use this value will have no source of telemetry', NULL), + ('televit', 'Televit Telemetry', 'This device make is currently unsupported. All devices which use this value will have no source of telemetry', NULL), + ('teleonics', 'Teleonics Telemetry', 'This device make is currently unsupported. All devices which use this value will have no source of telemetry', NULL); + + ---------------------------------------------------------------------------------------- + -- Create frequency table + ---------------------------------------------------------------------------------------- + CREATE TABLE frequency_unit ( + frequency_unit_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + name varchar(32) NOT NULL, + description varchar(128), + record_effective_date timestamptz(6) NOT NULL, + record_end_date timestamptz(6), + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + + CONSTRAINT frequency_unit_pk PRIMARY KEY (frequency_unit_id) + ); + + COMMENT ON TABLE frequency_unit IS 'This table is intended to store options that users can select for their freqency unit.'; + COMMENT ON COLUMN frequency_unit.frequency_unit_id IS 'Primary key for frequency unit.'; + COMMENT ON COLUMN frequency_unit.name IS 'Name of the frequency unit option.'; + COMMENT ON COLUMN frequency_unit.description IS 'Description of the frequency unit option.'; + COMMENT ON COLUMN frequency_unit.record_effective_date IS 'Start date of the frequency unit option.'; + COMMENT ON COLUMN frequency_unit.record_end_date IS 'End date of the frequency unit option.'; + COMMENT ON COLUMN frequency_unit.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN frequency_unit.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN frequency_unit.update_date IS 'The datetime 
the record was updated.'; + COMMENT ON COLUMN frequency_unit.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN frequency_unit.revision_count IS 'Revision count used for concurrency control.'; + + ---------------------------------------------------------------------------------------- + -- Add triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_frequency_unit BEFORE INSERT OR UPDATE OR DELETE ON biohub.frequency_unit FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_frequency_unit AFTER INSERT OR UPDATE OR DELETE ON biohub.frequency_unit FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + ---------------------------------------------------------------------------------------- + -- Add initial values to frequency unit table + ---------------------------------------------------------------------------------------- + INSERT INTO frequency_unit (name, description, record_effective_date) VALUES + ('khz', 'Kilohertz', NOW()), + ('mhz', 'Megahertz', NOW()), + ('hz', 'Hertz', NOW()); + + ---------------------------------------------------------------------------------------- + -- Create Views + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub_dapi_v1; + + CREATE OR REPLACE VIEW telemetry_ats as SELECT * FROM biohub.telemetry_ats; + CREATE OR REPLACE VIEW telemetry_vectronic as SELECT * FROM biohub.telemetry_vectronic; + CREATE OR REPLACE VIEW telemetry_lotek as SELECT * FROM biohub.telemetry_lotek; + CREATE OR REPLACE VIEW device_make as SELECT * FROM biohub.device_make; + CREATE OR REPLACE VIEW frequency_unit as SELECT * FROM biohub.frequency_unit; + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(``); +} diff --git a/database/src/migrations/20241008000000_bctw_migration.ts 
b/database/src/migrations/20241008000000_bctw_migration.ts new file mode 100644 index 0000000000..a25ce732bf --- /dev/null +++ b/database/src/migrations/20241008000000_bctw_migration.ts @@ -0,0 +1,361 @@ +import { Knex } from 'knex'; + +/** + * Add new tables (migrated concepts from BCTW) + * - device + * - deployment (to replace the old `deployment` table which was renamed to `deployment_old`) + * - telemetry_manual + * - telemetry_credential_lotek + * - telemetry_credential_vectronic + * - survey_telemetry_vendor_credential + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function up(knex: Knex): Promise { + await knex.raw(`--sql + ---------------------------------------------------------------------------------------- + -- Create new tables + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub; + + CREATE TABLE device ( + device_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + survey_id integer NOT NULL, + device_key varchar NOT NULL, + serial varchar NOT NULL, + device_make_id integer NOT NULL, + model varchar(100), + comment varchar(250), + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT device_pk PRIMARY KEY (device_id) + ); + + COMMENT ON TABLE device IS 'A device of a telemetry (or similar) device on an animal.'; + COMMENT ON COLUMN device.device_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN device.survey_id IS 'Foreign key to the survey table.'; + COMMENT ON COLUMN device.device_key IS '(Generated) The SIMS unique key for the device.'; + COMMENT ON COLUMN device.serial IS 'The serial identifier of the device.'; + COMMENT ON COLUMN device.device_make_id IS 'Foreign key to the device_make table.'; + COMMENT ON COLUMN device.model IS 'The device model.'; + COMMENT ON 
COLUMN device.comment IS 'A comment about the device.'; + COMMENT ON COLUMN device.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN device.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN device.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN device.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN device.revision_count IS 'Revision count used for concurrency control.'; + + -- Add foreign key constraints + ALTER TABLE device + ADD CONSTRAINT device_fk1 + FOREIGN KEY (survey_id) + REFERENCES survey(survey_id); + + ALTER TABLE device + ADD CONSTRAINT device_fk2 + FOREIGN KEY (device_make_id) + REFERENCES device_make(device_make_id); + + -- Add unique constraints + ALTER TABLE device ADD CONSTRAINT device_uk1 UNIQUE (survey_id, serial, device_make_id); + + -- Add indexes for foreign keys + CREATE INDEX device_idx1 ON device(survey_id); + + CREATE INDEX device_idx2 ON device(device_make_id); + + -- Add indexes + CREATE INDEX device_idx3 ON device(device_key); + + ---------------------------------------------------------------------------------------- + + CREATE TABLE deployment ( + deployment_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + survey_id integer NOT NULL, + critter_id integer NOT NULL, + device_id integer NOT NULL, + device_key varchar NOT NULL, + frequency integer, + frequency_unit_id integer, + attachment_start_date date NOT NULL, + attachment_start_time time, + attachment_start_timestamp timestamptz(6) GENERATED ALWAYS AS (COALESCE(attachment_start_date + attachment_start_time, attachment_start_date::timestamp)) stored, + attachment_end_date date, + attachment_end_time time, + attachment_end_timestamp timestamptz(6) GENERATED ALWAYS AS (COALESCE(attachment_end_date + attachment_end_time, attachment_end_date::timestamp)) stored, + 
critterbase_start_capture_id uuid, + critterbase_end_capture_id uuid, + critterbase_end_mortality_id uuid, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + -- Check that the critterbase_end_capture_id and critterbase_end_mortality_id are mutually exclusive (only one can be set, or neither) + CONSTRAINT check_critterbase_end_id CHECK (NOT (critterbase_end_capture_id IS NOT NULL AND critterbase_end_mortality_id IS NOT NULL)), + -- Check that the attachment_start_timestamp is before attachment_end_timestamp + CONSTRAINT check_attachment_start_before_end CHECK (attachment_start_timestamp <= attachment_end_timestamp), + -- Check that frequency and frequency_unit_id coexist (both must be null or both must be not null) + CONSTRAINT check_frequency_and_unit CHECK ((frequency IS NOT NULL AND frequency_unit_id IS NOT NULL) OR (frequency IS NULL AND frequency_unit_id IS NULL)), + -- Check that for deployments of the same device_key, that the attachment dates do not overlap + CONSTRAINT check_no_device_attachment_date_overlap EXCLUDE USING gist ( + device_key WITH =, + tstzrange(attachment_start_timestamp, attachment_end_timestamp) WITH && + ), + CONSTRAINT deployment_pk PRIMARY KEY (deployment_id) + ); + + COMMENT ON TABLE deployment IS 'A deployment of a telemetry (or similar) device on an animal.'; + COMMENT ON COLUMN deployment.deployment_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN deployment.survey_id IS 'Foreign key to the survey table.'; + COMMENT ON COLUMN deployment.critter_id IS 'Foreign key to the critter table.'; + COMMENT ON COLUMN deployment.device_id IS 'Foreign key to the device table.'; + COMMENT ON COLUMN deployment.device_key IS '(Generated) The SIMS unique key for the device.'; + COMMENT ON COLUMN deployment.frequency IS 'The frequency of the device.'; + COMMENT ON COLUMN 
deployment.frequency_unit_id IS 'Foreign key to the frequency_unit table.'; + COMMENT ON COLUMN deployment.attachment_start_date IS 'The date the device was attached to the animal.'; + COMMENT ON COLUMN deployment.attachment_start_time IS 'The time the device was attached to the animal.'; + COMMENT ON COLUMN deployment.attachment_start_timestamp IS '(Generated) The timestamp the device was attached to the animal.'; + COMMENT ON COLUMN deployment.attachment_end_date IS 'The date the device was removed from the animal.'; + COMMENT ON COLUMN deployment.attachment_end_time IS 'The time the device was removed from the animal.'; + COMMENT ON COLUMN deployment.attachment_end_timestamp IS '(Generated) The timestamp the device was removed from the animal.'; + COMMENT ON COLUMN deployment.critterbase_start_capture_id IS 'UUID of an external Critterbase capture record. The capture event during which the device was attached to the animal.'; + COMMENT ON COLUMN deployment.critterbase_end_capture_id IS 'UUID of an external Critterbase capture record. The capture event during which the device was removed from the animal.'; + COMMENT ON COLUMN deployment.critterbase_end_mortality_id IS 'UUID of an external Critterbase mortality record. 
The mortality event during which the device was removed from the animal.'; + COMMENT ON COLUMN deployment.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN deployment.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN deployment.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN deployment.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN deployment.revision_count IS 'Revision count used for concurrency control.'; + + -- Add foreign key constraints + ALTER TABLE deployment + ADD CONSTRAINT deployment_fk1 + FOREIGN KEY (survey_id) + REFERENCES survey(survey_id); + + ALTER TABLE deployment + ADD CONSTRAINT deployment_fk2 + FOREIGN KEY (critter_id) + REFERENCES critter(critter_id); + + ALTER TABLE deployment + ADD CONSTRAINT deployment_fk3 + FOREIGN KEY (device_id) + REFERENCES device(device_id); + + ALTER TABLE deployment + ADD CONSTRAINT deployment_fk4 + FOREIGN KEY (frequency_unit_id) + REFERENCES frequency_unit(frequency_unit_id); + + -- Add indexes for foreign keys + CREATE INDEX deployment_idx1 ON deployment(survey_id); + + CREATE INDEX deployment_idx2 ON deployment(critter_id); + + CREATE INDEX deployment_idx3 ON deployment(device_id); + + CREATE INDEX deployment_idx4 ON deployment(frequency_unit_id); + + -- Add indexes + CREATE INDEX deployment_idx5 ON deployment(device_key); + ---------------------------------------------------------------------------------------- + + CREATE TABLE telemetry_manual ( + telemetry_manual_id uuid DEFAULT public.gen_random_uuid(), + deployment_id integer NOT NULL, + latitude numeric(10, 7) NOT NULL, + longitude numeric(10, 7) NOT NULL, + acquisition_date timestamptz(6) NOT NULL, + transmission_date timestamptz(6), + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user 
integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT telemetry_manual_pk PRIMARY KEY (telemetry_manual_id) + ); + + COMMENT ON TABLE telemetry_manual IS 'A manually entered telemetry record.'; + COMMENT ON COLUMN telemetry_manual.telemetry_manual_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN telemetry_manual.deployment_id IS 'Foreign key to the deployment table.'; + COMMENT ON COLUMN telemetry_manual.latitude IS 'The latitude of the telemetry record, having ten points of total precision and 7 points of precision after the decimal.'; + COMMENT ON COLUMN telemetry_manual.longitude IS 'The longitude of the telemetry record, having ten points of total precision and 7 points of precision after the decimal.'; + COMMENT ON COLUMN telemetry_manual.acquisition_date IS 'The date the device recorded the telemetry record. (Ex: the device captures a gps point every hour).'; + COMMENT ON COLUMN telemetry_manual.transmission_date IS 'The date the device transmitted the telemetry record to the vendor. 
(Ex: the device transmits all recorded gps points to the vendor every 24 hours).'; + COMMENT ON COLUMN telemetry_manual.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_manual.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_manual.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_manual.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_manual.revision_count IS 'Revision count used for concurrency control.'; + + -- Add foreign key constraints + ALTER TABLE telemetry_manual + ADD CONSTRAINT telemetry_manual_fk1 + FOREIGN KEY (deployment_id) + REFERENCES deployment(deployment_id); + + -- Add indexes for foreign keys + CREATE INDEX telemetry_manual_idx1 ON telemetry_manual(deployment_id); + + ---------------------------------------------------------------------------------------- + + CREATE TABLE telemetry_credential_lotek ( + telemetry_credential_lotek_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + device_key varchar GENERATED ALWAYS AS ('lotek:' || ndeviceid::text) STORED, + ndeviceid integer NOT NULL, + strspecialid varchar(100), + dtcreated timestamptz(6), + strsatellite varchar(100), + verified_date timestamptz(6), + is_valid boolean, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT telemetry_credential_lotek_pk PRIMARY KEY (telemetry_credential_lotek_id) + ); + + COMMENT ON TABLE telemetry_credential_lotek IS 'Lotek telemetry device credentials.'; + COMMENT ON COLUMN telemetry_credential_lotek.telemetry_credential_lotek_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN telemetry_credential_lotek.device_key IS '(Generated) The SIMS unique key for 
the device.'; + COMMENT ON COLUMN telemetry_credential_lotek.ndeviceid IS 'The Lotek unique id for the device.'; + COMMENT ON COLUMN telemetry_credential_lotek.strspecialid IS 'The Lotek IMEI number.'; + COMMENT ON COLUMN telemetry_credential_lotek.dtcreated IS 'The Lotek create date.'; + COMMENT ON COLUMN telemetry_credential_lotek.strsatellite IS 'The Lotek satellite name.'; + COMMENT ON COLUMN telemetry_credential_lotek.verified_date IS 'The date the credential was verified (by uploading the cfg file to lotek and confirming it is valid).'; + COMMENT ON COLUMN telemetry_credential_lotek.is_valid IS 'True if the credential is valid, false if it is invalid.'; + COMMENT ON COLUMN telemetry_credential_lotek.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_credential_lotek.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_credential_lotek.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_credential_lotek.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_credential_lotek.revision_count IS 'Revision count used for concurrency control.'; + + -- Add indexes + CREATE UNIQUE INDEX telemetry_credential_lotek_idx1 ON telemetry_credential_lotek(device_key); + + -- Add unique constraint on device_key and is_valid (only allow one credential per device to be valid) + ALTER TABLE telemetry_credential_lotek ADD CONSTRAINT telemetry_credential_lotek_uk1 UNIQUE (device_key, is_valid); + + ---------------------------------------------------------------------------------------- + + CREATE TABLE telemetry_credential_vectronic ( + telemetry_credential_vectronic_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + device_key varchar GENERATED ALWAYS AS ('vectronic:' || idcollar::text) STORED, + idcollar integer NOT NULL, + comtype varchar(50) 
NOT NULL, + idcom integer NOT NULL, + collarkey varchar(1000) NOT NULL, + collartype integer NOT NULL, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT telemetry_credential_vectronic_pk PRIMARY KEY (telemetry_credential_vectronic_id) + ); + + COMMENT ON TABLE telemetry_credential_vectronic IS 'Vectronic telemetry device credentials.'; + COMMENT ON COLUMN telemetry_credential_vectronic.telemetry_credential_vectronic_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN telemetry_credential_vectronic.device_key IS '(Generated) The SIMS unique key for the device.'; + COMMENT ON COLUMN telemetry_credential_vectronic.idcollar IS 'The Vectronic unique id for the device.'; + COMMENT ON COLUMN telemetry_credential_vectronic.comtype IS 'The Vectronic comtype field.'; + COMMENT ON COLUMN telemetry_credential_vectronic.idcom IS 'The Vectronic idcom field.'; + COMMENT ON COLUMN telemetry_credential_vectronic.collarkey IS 'The Vectronic device key.'; + COMMENT ON COLUMN telemetry_credential_vectronic.collartype IS 'The Vectronic device type.'; + COMMENT ON COLUMN telemetry_credential_vectronic.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN telemetry_credential_vectronic.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_credential_vectronic.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN telemetry_credential_vectronic.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN telemetry_credential_vectronic.revision_count IS 'Revision count used for concurrency control.'; + + -- Add indexes + CREATE UNIQUE INDEX telemetry_credential_vectronic_idx1 ON telemetry_credential_vectronic(device_key); + + 
---------------------------------------------------------------------------------------- + + CREATE TABLE survey_telemetry_vendor_credential ( + survey_telemetry_vendor_credential_id integer GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1), + survey_telemetry_credential_attachment_id integer NOT NULL, + device_key varchar NOT NULL, + create_date timestamptz(6) DEFAULT now() NOT NULL, + create_user integer NOT NULL, + update_date timestamptz(6), + update_user integer, + revision_count integer DEFAULT 0 NOT NULL, + CONSTRAINT survey_telemetry_vendor_credential_pk PRIMARY KEY (survey_telemetry_vendor_credential_id) + ); + + COMMENT ON TABLE survey_telemetry_vendor_credential IS 'A record of a telemetry device credential that is associated with a survey.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.survey_telemetry_vendor_credential_id IS '(Generated) Surrogate primary key identifier.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.survey_telemetry_credential_attachment_id IS 'Foreign key to the survey_telemetry_credential_attachment table.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.device_key IS 'The SIMS unique key for the device.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.create_date IS 'The datetime the record was created.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.create_user IS 'The id of the user who created the record as identified in the system user table.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.update_date IS 'The datetime the record was updated.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.update_user IS 'The id of the user who updated the record as identified in the system user table.'; + COMMENT ON COLUMN survey_telemetry_vendor_credential.revision_count IS 'Revision count used for concurrency control.'; + + -- Add foreign key constraints + ALTER TABLE survey_telemetry_vendor_credential + ADD CONSTRAINT survey_telemetry_vendor_credential_fk1 + FOREIGN KEY 
(survey_telemetry_credential_attachment_id) + REFERENCES survey_telemetry_credential_attachment(survey_telemetry_credential_attachment_id); + + -- Add indexes for foreign keys + CREATE INDEX survey_telemetry_vendor_credential_idx1 ON survey_telemetry_vendor_credential(survey_telemetry_credential_attachment_id); + + -- Add indexes + CREATE INDEX survey_telemetry_vendor_credential_idx2 ON survey_telemetry_vendor_credential(device_key); + + ---------------------------------------------------------------------------------------- + -- Create audit/journal triggers + ---------------------------------------------------------------------------------------- + CREATE TRIGGER audit_deployment BEFORE INSERT OR UPDATE OR DELETE ON biohub.deployment FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_deployment AFTER INSERT OR UPDATE OR DELETE ON biohub.deployment FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + CREATE TRIGGER audit_device BEFORE INSERT OR UPDATE OR DELETE ON biohub.device FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_device AFTER INSERT OR UPDATE OR DELETE ON biohub.device FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + CREATE TRIGGER audit_telemetry_manual BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_manual FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_manual AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_manual FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + CREATE TRIGGER audit_telemetry_credential_lotek BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_credential_lotek FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_credential_lotek AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_credential_lotek FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + CREATE TRIGGER audit_telemetry_credential_vectronic BEFORE INSERT OR UPDATE OR DELETE ON biohub.telemetry_credential_vectronic 
FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_telemetry_credential_vectronic AFTER INSERT OR UPDATE OR DELETE ON biohub.telemetry_credential_vectronic FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + CREATE TRIGGER audit_survey_telemetry_vendor_credential BEFORE INSERT OR UPDATE OR DELETE ON biohub.survey_telemetry_vendor_credential FOR EACH ROW EXECUTE PROCEDURE tr_audit_trigger(); + CREATE TRIGGER journal_survey_telemetry_vendor_credential AFTER INSERT OR UPDATE OR DELETE ON biohub.survey_telemetry_vendor_credential FOR EACH ROW EXECUTE PROCEDURE tr_journal_trigger(); + + ---------------------------------------------------------------------------------------- + -- Create views + ---------------------------------------------------------------------------------------- + SET SEARCH_PATH=biohub_dapi_v1; + + CREATE OR REPLACE VIEW deployment AS SELECT * FROM biohub.deployment; + + CREATE OR REPLACE VIEW device AS SELECT * FROM biohub.device; + + CREATE OR REPLACE VIEW telemetry_manual AS SELECT * FROM biohub.telemetry_manual; + + CREATE OR REPLACE VIEW telemetry_credential_lotek AS SELECT * FROM biohub.telemetry_credential_lotek; + + CREATE OR REPLACE VIEW telemetry_credential_vectronic AS SELECT * FROM biohub.telemetry_credential_vectronic; + + CREATE OR REPLACE VIEW survey_telemetry_vendor_credential AS SELECT * FROM biohub.survey_telemetry_vendor_credential; + + `); +} + +export async function down(knex: Knex): Promise { + await knex.raw(``); +} diff --git a/database/src/procedures/delete_survey_procedure.ts b/database/src/procedures/delete_survey_procedure.ts index 460257526a..f34225640a 100644 --- a/database/src/procedures/delete_survey_procedure.ts +++ b/database/src/procedures/delete_survey_procedure.ts @@ -22,6 +22,8 @@ export async function seed(knex: Knex): Promise { BEGIN + -------- delete basic survey data -------- + WITH occurrence_submissions AS ( @@ -113,6 +115,11 @@ export async function seed(knex: Knex): Promise { 
DELETE FROM survey_report_attachment WHERE survey_id = p_survey_id; + DELETE FROM study_species_unit + WHERE study_species_unit_id IN ( + SELECT study_species_unit_id FROM study_species WHERE survey_id = p_survey_id + ); + DELETE FROM study_species WHERE survey_id = p_survey_id; @@ -152,17 +159,81 @@ export async function seed(knex: Knex): Promise { DELETE FROM survey_location WHERE survey_id = p_survey_id; + DELETE FROM survey_intended_outcome + WHERE survey_id = p_survey_id; + + -------- delete device, deployment, credential, telemetry data -------- + + DELETE FROM telemetry_manual + WHERE deployment_id IN (SELECT deployment_id FROM deployment WHERE survey_id = p_survey_id); + + DELETE FROM survey_telemetry_vendor_credential + WHERE survey_telemetry_credential_attachment_id IN (SELECT survey_telemetry_credential_attachment_id from survey_telemetry_credential_attachment WHERE survey_id = p_survey_id); + + DELETE FROM survey_telemetry_credential_attachment + WHERE survey_id = p_survey_id; + DELETE FROM deployment WHERE critter_id IN (SELECT critter_id FROM critter WHERE survey_id = p_survey_id); - DELETE FROM critter + DELETE FROM deployment WHERE survey_id = p_survey_id; - DELETE FROM survey_intended_outcome + DELETE FROM device + WHERE survey_id = p_survey_id; + + -------- delete animal data -------- + + DELETE FROM subcount_critter + WHERE critter_id IN (SELECT critter_id FROM critter WHERE survey_id = p_survey_id); + + DELETE FROM critter_mortality_attachment + WHERE critter_id IN (SELECT critter_id FROM critter WHERE survey_id = p_survey_id); + + DELETE FROM critter_capture_attachment + WHERE critter_id IN (SELECT critter_id FROM critter WHERE survey_id = p_survey_id); + + DELETE FROM critter WHERE survey_id = p_survey_id; -------- delete observation data -------- + DELETE FROM observation_subcount_qualitative_environment + WHERE observation_subcount_id IN ( + SELECT observation_subcount_id FROM observation_subcount + WHERE survey_observation_id IN ( + SELECT 
survey_observation_id FROM survey_observation + WHERE survey_id = p_survey_id + ) + ); + + DELETE FROM observation_subcount_quantitative_environment + WHERE observation_subcount_id IN ( + SELECT observation_subcount_id FROM observation_subcount + WHERE survey_observation_id IN ( + SELECT survey_observation_id FROM survey_observation + WHERE survey_id = p_survey_id + ) + ); + + DELETE FROM observation_subcount_qualitative_measurement + WHERE observation_subcount_id IN ( + SELECT observation_subcount_id FROM observation_subcount + WHERE survey_observation_id IN ( + SELECT survey_observation_id FROM survey_observation + WHERE survey_id = p_survey_id + ) + ); + + DELETE FROM observation_subcount_quantitative_measurement + WHERE observation_subcount_id IN ( + SELECT observation_subcount_id FROM observation_subcount + WHERE survey_observation_id IN ( + SELECT survey_observation_id FROM survey_observation + WHERE survey_id = p_survey_id + ) + ); + DELETE FROM observation_subcount WHERE survey_observation_id IN ( SELECT survey_observation_id FROM survey_observation @@ -195,6 +266,7 @@ export async function seed(knex: Knex): Promise { WHERE survey_id = p_survey_id; -------- delete sampling data -------- + DELETE FROM survey_sample_period WHERE survey_sample_method_id IN ( SELECT survey_sample_method_id @@ -220,8 +292,41 @@ export async function seed(knex: Knex): Promise { DELETE FROM survey_sample_site WHERE survey_id = p_survey_id; + -------- delete technique data -------- + + DELETE FROM method_technique_attractant + WHERE method_technique_id IN ( + SELECT method_technique_id + FROM method_technique + WHERE survey_id = p_survey_id + ); + + DELETE FROM method_technique_attribute_qualitative + WHERE method_technique_id IN ( + SELECT method_technique_id + FROM method_technique + WHERE survey_id = p_survey_id + ); + + DELETE FROM method_technique_attribute_quantitative + WHERE method_technique_id IN ( + SELECT method_technique_id + FROM method_technique + WHERE survey_id = 
p_survey_id + ); + + DELETE FROM method_technique_vantage_mode + WHERE method_technique_id IN ( + SELECT method_technique_id + FROM method_technique + WHERE survey_id = p_survey_id + ); + + DELETE FROM method_technique + WHERE survey_id = p_survey_id; -------- delete the survey -------- + DELETE FROM survey WHERE survey_id = p_survey_id; diff --git a/database/src/procedures/tr_before_deployment_generate_device_key.ts b/database/src/procedures/tr_before_deployment_generate_device_key.ts new file mode 100644 index 0000000000..cf5d69563c --- /dev/null +++ b/database/src/procedures/tr_before_deployment_generate_device_key.ts @@ -0,0 +1,55 @@ +import { Knex } from 'knex'; + +/** + * Function Name: generate_device_key_for_deployment_table + * + * Trigger Name: tr_before_deployment_generate_device_key + * + * Affected Tables: Device + * + * Purpose: Generates the 'device_key' column value for a new/updated device record. + * + * Note: The 'device_key' column is a concatenation of the 'name' column value from the 'device_make' code table and the + * 'serial' column value from the 'device' table. + * + * @example + * device_make_id = 'vendor:123456' + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function seed(knex: Knex): Promise { + await knex.raw(`--sql + SET search_path = 'biohub'; + + -- Function to generate the device_key value for a new/updated device record + CREATE OR REPLACE FUNCTION biohub.generate_device_key_for_deployment_table() + RETURNS trigger + LANGUAGE plpgsql + SECURITY invoker + AS $function$ + DECLARE + _device device%rowtype; + BEGIN + -- Get the device record for the incoming device_id + SELECT * FROM device where device_id = new.device_id INTO _device; + + -- If no matching device record was found, raise an exception + IF NOT found THEN + RAISE EXCEPTION 'Failed to generate deployment.device_key. 
The device_id (%) does not exist in the device table.', new.device_id; + END IF; + + -- Assign the device.device_key to the deployment.device_key + new.device_key := _device.device_key; + + -- Return the new deployment record with the device_key value + RETURN new; + END; + $function$; + + -- Drop the existing trigger, if one exists, and create a new one + DROP TRIGGER IF EXISTS tr_before_deployment_generate_device_key ON biohub.deployment; + CREATE TRIGGER tr_before_deployment_generate_device_key BEFORE INSERT OR UPDATE ON biohub.deployment FOR EACH ROW EXECUTE PROCEDURE generate_device_key_for_deployment_table(); + `); +} diff --git a/database/src/procedures/tr_before_device_generate_device_key.ts b/database/src/procedures/tr_before_device_generate_device_key.ts new file mode 100644 index 0000000000..a311bb8298 --- /dev/null +++ b/database/src/procedures/tr_before_device_generate_device_key.ts @@ -0,0 +1,55 @@ +import { Knex } from 'knex'; + +/** + * Function Name: generate_device_key_for_device_table + * + * Trigger Name: tr_before_device_generate_device_key + * + * Affected Tables: Device + * + * Purpose: Generates the 'device_key' column value for a new/updated device record. + * + * Note: The 'device_key' column is a concatenation of the 'name' column value from the 'device_make' code table and the + * 'serial' column value from the 'device' table. 
+ * + * @example + * device_make_id = 'vendor:123456' + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function seed(knex: Knex): Promise { + await knex.raw(`--sql + SET search_path = 'biohub'; + + -- Function to generate the device_key value for a new/updated device record + CREATE OR REPLACE FUNCTION biohub.generate_device_key_for_device_table() + RETURNS trigger + LANGUAGE plpgsql + SECURITY invoker + AS $function$ + DECLARE + _device_make device_make%rowtype; + BEGIN + -- Get the device_make record for the incoming device_make_id + SELECT * FROM device_make where device_make_id = new.device_make_id INTO _device_make; + + -- If no matching device_make record was found, raise an exception + IF NOT found THEN + RAISE EXCEPTION 'Failed to generate device.device_key. The device_make_id (%) does not exist in the device_make table.', new.device_make_id; + END IF; + + -- Generate the device_key value and assign it to the device.device_key + new.device_key := (_device_make.name || ':' || new.serial); + + -- Return the new device record with the device_key value + RETURN new; + END; + $function$; + + -- Drop the existing trigger, if one exists, and create a new one + DROP TRIGGER IF EXISTS tr_before_device_generate_device_key ON biohub.device; + CREATE TRIGGER tr_before_device_generate_device_key BEFORE INSERT OR UPDATE ON biohub.device FOR EACH ROW EXECUTE PROCEDURE generate_device_key_for_device_table(); + `); +} diff --git a/database/src/seeds/04_telemetry.ts b/database/src/seeds/04_telemetry.ts new file mode 100644 index 0000000000..f849fd41e1 --- /dev/null +++ b/database/src/seeds/04_telemetry.ts @@ -0,0 +1,348 @@ +import { faker } from '@faker-js/faker'; +import { Knex } from 'knex'; + +const DB_SCHEMA = process.env.DB_SCHEMA; +const DB_SCHEMA_DAPI_V1 = process.env.DB_SCHEMA_DAPI_V1; + +const TELEMETRY_START_DATE = '2024-01-01'; +const TELEMETRY_END_DATE = '2025-01-01'; +const DEPLOYMENT_START_DATE = '2024-06-01'; // 6 months 
before telemetry start date +const DEPLOYMENT_END_DATE = TELEMETRY_END_DATE; // Same as telemetry end date +const INSERT_BATCH_SIZE = 100; +const NUM_TELEMETRY_POINTS = { + MANUAL: 50, + LOTEK: 100, + VECTRONIC: 50, + ATS: 10, + EXTRA: 100 +}; +const LOTEK_DEVICE = { + make: 'lotek', + model: 'SRX-800', + serial: '1111' +}; +const VECTRONIC_DEVICE = { + make: 'vectronic', + model: 'GPS-GSM-Tracker', + serial: '2222' +}; +const ATS_DEVICE = { + make: 'ats', + model: 'iPC', + serial: '3333' +}; +const DEVICES = [LOTEK_DEVICE, VECTRONIC_DEVICE, ATS_DEVICE]; + +/** + * Add telemetry and telemetry metadata to the database. + * + * Notes: + * - Each survey will have 3 devices, one of each make + * - Each device will have a deployment with associated telemetry + * - Each device will have telemetry data ie: vendor and manual + * - Seed device serials are generated using the survey ID and device serial ie: 12222 for survey 1 and device 2222 + * - Additional device serials are generated between 70000 and 80000 ie: 72800 + * + * @export + * @param {Knex} knex + * @return {*} {Promise} + */ +export async function seed(knex: Knex): Promise { + await knex.raw(` + SET SCHEMA '${DB_SCHEMA}'; + SET SEARCH_PATH=${DB_SCHEMA},${DB_SCHEMA_DAPI_V1}; + `); + + const device = await knex.select('*').from('device').limit(1); + + // If devices already exist, do not seed telemetry + if (device.length) { + return; + } + + const generateVectronicID = generateID(); + const surveys = await knex.select('*').from('survey'); + + for (const survey of surveys) { + const surveyId = survey.survey_id; + /** + * Each survey will have 3 devices, one of each make + */ + for (const device of DEVICES) { + /** + * + * INSERT DEVICE/CRITTER/DEPLOYMENT METADATA + * + */ + const rawDevice = await knex.raw(insertDevice(surveyId, device)); + const rawCritter = await knex.raw(insertCritter(surveyId)); + + const critterId = rawCritter.rows[0].critter_id; + const deviceId = rawDevice.rows[0].device_id; + const 
deviceSerial = rawDevice.rows[0].serial; + + const rawDeployment = await knex.raw(insertDeployment(surveyId, critterId, deviceId)); + const deploymentId = rawDeployment.rows[0].deployment_id; + + // MANUAL TELEMETRY + const manualTelemetry = getManualTelemetry(deploymentId, NUM_TELEMETRY_POINTS.MANUAL); + await knex.insert(manualTelemetry).into('telemetry_manual'); + + // LOTEK TELEMETRY + if (device.make === LOTEK_DEVICE.make) { + let telemetry = getLotekTelemetry(deviceSerial, NUM_TELEMETRY_POINTS.LOTEK); + + // Generate additional filler telemetry + for (let i = 0; i < 5; i++) { + const fakeSerial = faker.number.int({ min: 70000, max: 80000 }); + telemetry = telemetry.concat(getLotekTelemetry(fakeSerial, NUM_TELEMETRY_POINTS.EXTRA)); + } + + await knex.batchInsert('telemetry_lotek', telemetry, INSERT_BATCH_SIZE); + } + + // VECTRONIC TELEMETRY + if (device.make === VECTRONIC_DEVICE.make) { + let telemetry = getVectronicTelemetry(deviceSerial, NUM_TELEMETRY_POINTS.VECTRONIC, generateVectronicID); + + // Generate additional filler telemetry + for (let i = 0; i < 5; i++) { + const fakeSerial = faker.number.int({ min: 70000, max: 80000 }); + telemetry = telemetry.concat( + getVectronicTelemetry(fakeSerial, NUM_TELEMETRY_POINTS.EXTRA, generateVectronicID) + ); + } + + await knex.batchInsert('telemetry_vectronic', telemetry, INSERT_BATCH_SIZE); + } + + // ATS TELEMETRY + if (device.make === ATS_DEVICE.make) { + let telemetry = getAtsTelemetry(deviceSerial, NUM_TELEMETRY_POINTS.ATS); + + // Generate additional filler telemetry + for (let i = 0; i < 5; i++) { + const fakeSerial = faker.number.int({ min: 70000, max: 80000 }); + telemetry = telemetry.concat(getAtsTelemetry(fakeSerial, NUM_TELEMETRY_POINTS.EXTRA)); + } + + await knex.batchInsert('telemetry_ats', telemetry, INSERT_BATCH_SIZE); + } + } + } +} + +/** + * Generator function to create unique ID's. + * + * Actual generator usecase? + * Generates collision free vectronic vendor ID's (idposition). 
+ * + */ +function* generateID() { + let id = 0; + while (true) { + yield id++; + } +} + +const getSurveyDeviceSerial = (surveyId: number, serial: string) => `${surveyId}${serial}`; + +/** + * SQL to insert a critter row. + * + */ +const insertCritter = (surveyId: number) => ` + INSERT INTO critter ( + survey_id, + critterbase_critter_id + ) + VALUES ( + ${surveyId}, + $$${faker.string.uuid()}$$ -- TODO: replace with actual critterbase critterID from critterbase seed + ) + RETURNING critter_id; +`; + +/** + * SQL to insert a device row. + * + */ +const insertDevice = (surveyId: number, device: { make: string; model: string; serial: string }) => ` + INSERT INTO device ( + survey_id, + serial, + device_make_id, + model + ) + VALUES ( + ${surveyId}, + $$${getSurveyDeviceSerial(surveyId, device.serial)}$$, + (SELECT device_make_id FROM device_make WHERE name = '${device.make}'), + $$${device.model}$$ + ) + RETURNING device_id, serial; +`; + +/** + * SQL to insert a deployment row. + * + */ +const insertDeployment = (surveyId: number, critterId: number, deviceId: number) => ` + INSERT INTO deployment ( + survey_id, + critter_id, + device_id, + attachment_start_date, + attachment_end_date + ) + VALUES ( + ${surveyId}, + ${critterId}, + ${deviceId}, + $$${DEPLOYMENT_START_DATE}$$, + $$${DEPLOYMENT_END_DATE}$$ + ) + RETURNING deployment_id; +`; + +/** + * Get manual telemetry data for insert. + * + */ +const getManualTelemetry = (deploymentId: number, numRecords: number) => { + const telemetry = []; + + for (let i = 0; i < numRecords; i++) { + const telemetryDate = faker.date.between({ from: TELEMETRY_START_DATE, to: TELEMETRY_END_DATE }); + + telemetry.push({ + deployment_id: deploymentId, + latitude: faker.location.latitude(), + longitude: faker.location.longitude(), + acquisition_date: telemetryDate, + transmission_date: telemetryDate + }); + } + + return telemetry; +}; + +/** + * Get telemetry data for Lotek devices. 
+ * + */ +const getLotekTelemetry = (deviceSerial: number, numRecords: number) => { + const telemetry = []; + + for (let i = 0; i < numRecords; i++) { + const telemetryDate = faker.date.between({ from: TELEMETRY_START_DATE, to: TELEMETRY_END_DATE }); + const latitude = faker.location.latitude({ min: 48, max: 60 }); + const longitude = faker.location.longitude({ min: -139, max: -114 }); + + telemetry.push({ + deviceid: deviceSerial, + channelstatus: faker.hacker.adjective(), + uploadtimestamp: telemetryDate, + latitude: latitude, + longitude: longitude, + altitude: faker.number.int({ min: 0, max: 1000 }), + ecefx: faker.number.float({ min: 0, max: 10 }), + ecefy: faker.number.float({ min: 0, max: 10 }), + ecefz: faker.number.float({ min: 0, max: 10 }), + rxstatus: faker.number.int({ min: 0, max: 1 }), + temperature: faker.number.float({ min: -20, max: 40 }), + fixduration: faker.number.int({ min: 0, max: 100 }), + bhastempvoltage: false, + devname: faker.person.firstName(), + deltatime: null, + fixtype: faker.number.int({ min: 0, max: 3 }), + cepradius: faker.number.float({ min: 0, max: 10 }), + crc: null, + recdatetime: telemetryDate + }); + } + + return telemetry; +}; + +/** + * Get telemetry data for Vectronic devices. 
+ * + */ +const getVectronicTelemetry = (deviceSerial: number, numRecords: number, generateID: Generator) => { + const telemetry = []; + + for (let i = 0; i < numRecords; i++) { + const telemetryDate = faker.date.between({ from: TELEMETRY_START_DATE, to: TELEMETRY_END_DATE }); + const latitude = faker.location.latitude({ min: 48, max: 60 }); + const longitude = faker.location.longitude({ min: -139, max: -114 }); + + telemetry.push({ + idcollar: deviceSerial, + idposition: generateID.next().value, + acquisitiontime: telemetryDate, + scts: telemetryDate, + origincode: 'G', + ecefx: faker.number.float({ min: 0, max: 10 }), + ecefy: faker.number.float({ min: 0, max: 10 }), + ecefz: faker.number.float({ min: 0, max: 10 }), + latitude: latitude, + longitude: longitude, + height: faker.number.int({ min: 0, max: 1000 }), + dop: faker.number.float({ min: 0, max: 10 }), + idfixtype: 10, + positionerror: faker.number.float({ min: 0, max: 10 }), + satcount: faker.number.int({ min: 0, max: 10 }), + // skipping all cannels ie: ch01satid, ch02satid... + idmortalitystatus: faker.number.int({ min: 0, max: 1 }), + activity: faker.number.int({ min: 0, max: 1 }), + mainvoltage: faker.number.float({ min: 0, max: 10 }), + backupvoltage: faker.number.float({ min: 0, max: 10 }), + temperature: faker.number.float({ min: -20, max: 40 }), + transformedx: faker.number.float({ min: 0, max: 10 }), + transformedy: faker.number.float({ min: 0, max: 10 }) + }); + } + + return telemetry; +}; + +/** + * Get telemetry data for ATS devices. 
+ * + */ +const getAtsTelemetry = (deviceSerial: number, numRecords: number) => { + const telemetry = []; + + for (let i = 0; i < numRecords; i++) { + const telemetryDate = faker.date.between({ from: TELEMETRY_START_DATE, to: TELEMETRY_END_DATE }); + const latitude = faker.location.latitude({ min: 48, max: 60 }); + const longitude = faker.location.longitude({ min: -139, max: -114 }); + + telemetry.push({ + collarserialnumber: deviceSerial, + date: telemetryDate, + numberfixes: faker.number.int({ min: 0, max: 100 }), + battvoltage: faker.number.float({ min: 0, max: 10 }), + mortality: false, + breakoff: false, + gpsontime: faker.number.int({ min: 0, max: 100 }), + satontime: faker.number.int({ min: 0, max: 100 }), + saterrors: faker.number.int({ min: 0, max: 100 }), + gmtoffset: faker.number.int({ min: 0, max: 100 }), + lowbatt: false, + event: faker.hacker.verb(), + latitude: latitude, + longitude: longitude, + cepradius_km: faker.number.int({ min: 0, max: 10 }), + temperature: String(faker.number.float({ min: -20, max: 40 })), // TODO: Invesitgate why temperature is a string? 
+ hdop: faker.string.alpha({ length: { min: 0, max: 10 } }), + numsats: faker.number.int({ min: 0, max: 10 }), + fixtime: faker.string.numeric({ length: { min: 0, max: 10 } }), + activity: faker.string.alpha({ length: { min: 0, max: 10 } }) + }); + } + + return telemetry; +}; diff --git a/env_config/env.docker b/env_config/env.docker index 5c4bce03cb..06bfa58b69 100644 --- a/env_config/env.docker +++ b/env_config/env.docker @@ -103,15 +103,6 @@ BACKBONE_ARTIFACT_INTAKE_PATH=/api/artifact/intake BIOHUB_TAXON_PATH=/api/taxonomy/taxon BIOHUB_TAXON_TSN_PATH=/api/taxonomy/taxon/tsn -# ------------------------------------------------------------------------------ -# API - BC Telemetry Warehouse Connection -# ------------------------------------------------------------------------------ -# BCTW Platform - BCTW API URL -# (Note): If BCTW is running locally, you can use: -# Windows OS (Docker Desktop): `http://host.docker.internal:/api` -# Linux OS (Linux Docker): `http://172.17.0.1:/api` -BCTW_API_HOST=https://moe-bctw-api-dev.apps.silver.devops.gov.bc.ca - # ------------------------------------------------------------------------------ # API - Critterbase Connection # ------------------------------------------------------------------------------ @@ -246,3 +237,16 @@ NUM_SEED_OBSERVATIONS_PER_SURVEY=3 # Sets the number of desired seed subcounts to generate per observation. defaults to 1. NUM_SEED_SUBCOUNTS_PER_OBSERVATION=1 + + +# ------------------------------------------------------------------------------ +# Telemetry Configuration +# ------------------------------------------------------------------------------ + +# Lotek API +LOTEK_API_HOST=https://api.lotek.com +LOTEK_ACCOUNT_USERNAME= +LOTEK_ACCOUNT_PASSWORD= + +# Vectronic API +VECTRONIC_API_HOST=https://api.vectronic-wildlife.com/v2