From 5892232e1a381b311b444630fc6ed239569a369f Mon Sep 17 00:00:00 2001 From: ascariandrea Date: Thu, 16 Jan 2025 21:57:34 +0100 Subject: [PATCH] fix: extract implementation from context make --- .github/workflows/packages-pull-request.yml | 2 + .../@liexp/backend/src/context/api.context.ts | 2 +- packages/@liexp/backend/src/context/index.ts | 5 - .../src/context/urlMetadata.context.ts | 5 + packages/@liexp/backend/src/errors/index.ts | 2 +- .../event/createEventFromURL.flow.spec.ts | 132 +++++++++++ .../flows/event/createEventFromURL.flow.ts | 24 +- .../flows/event/extractFromURL.flow.spec.ts | 135 +++++++++++ .../src/flows/event/extractFromURL.flow.ts | 109 ++++----- .../backend/src/flows/links/link.flow.ts | 2 +- .../thumbnails/extractMP4Thumbnail.flow.ts | 2 +- .../src/flows/tg/MessageParser/index.ts | 2 +- .../createFromTGMessage.flow.spec.ts | 65 ++---- .../upsertPinnedMessage.flow.spec.ts | 11 +- .../src/flows/tg/createFromTGMessage.flow.ts | 2 +- .../src/flows/tg/parseMessages.flow.ts | 2 +- .../backend/src/flows/tg/parseURL.flow.ts | 2 +- .../src/providers/URLMetadata.provider.ts | 2 +- .../providers/ffmpeg/ffmpeg.provider.spec.ts | 2 +- .../backend/src/providers/fs/fs.provider.ts | 25 +- .../backend/src/providers/ner/ner.provider.ts | 5 +- .../src/providers/puppeteer.provider.ts | 12 +- .../src/providers/redis/RedisPubSub.ts | 44 ++++ .../backend/src/providers/redis/Subscriber.ts | 74 ++++++ .../src/providers/redis/redis.error.ts | 25 ++ .../src/providers/redis/redis.provider.ts | 147 ++---------- ....provider.ts => creates3ProviderConfig.ts} | 17 +- .../backend/src/providers/tg/tg.provider.ts | 8 +- .../src/pubsub/buildImageWithSharp.pubSub.ts | 8 +- .../events/createEventFromURL.pubSub.ts | 2 +- .../src/pubsub/jobs/processJobDone.pubSub.ts | 2 +- .../pubsub/media/createThumbnail.pubSub.ts | 2 +- .../pubsub/media/extractMediaExtra.pubSub.ts | 2 +- .../pubsub/media/generateThumbnail.pubSub.ts | 2 +- .../transferFromExternalProvider.pubSub.ts | 2 +- .../nlp/extractEntitiesWithNLP.pubSub.ts | 2 +- .../pubsub/postToSocialPlatforms.pubSub.ts | 2 +- .../src/pubsub/searchFromWikipedia.pubSub.ts | 2 +- .../pubsub/stats/createEntityStats.pubSub.ts | 2 +- .../src/queries/events/createEvent.query.ts | 2 +- .../src/queries/events/editEvent.query.ts | 2 +- .../events/fetchEventRelations.query.ts | 2 +- packages/@liexp/backend/src/test/index.ts | 121 ++++++++++ packages/@liexp/backend/src/test/mocks.ts | 38 ++- .../@liexp/backend/src/test/mocks/db.mock.ts | 2 + .../@liexp/backend/src/test/mocks/fs.mock.ts | 9 + .../backend/src/test/mocks/mock.utils.ts | 20 ++ .../backend/src/test/mocks/sharp.mock.ts | 7 +- ...1785bf42b588eaa482b4a024c964d3096dc6e0.txt | 1 + ....timestamp-1737064274359-67b01676a0689.mjs | 57 +++++ .../shared/src/endpoints/admin.endpoints.ts | 2 +- .../src/tests/arbitrary/Date.arbitrary.ts | 2 +- .../HumanReadableString.arbitrary.ts | 2 +- .../src/tests/arbitrary/URL.arbitrary.ts | 2 +- .../tests/arbitrary/common/UUID.arbitrary.ts | 2 +- .../events/ScientificStudy.arbitrary.ts | 2 +- services/ai-bot/src/run.ts | 3 +- services/api/src/app/config.ts | 8 - services/api/src/context/context.type.ts | 2 +- services/api/src/context/index.ts | 20 +- services/api/src/io/ControllerError.ts | 2 +- .../admin/images/buildImage.controller.ts | 8 +- .../events/__tests__/searchEvents.e2e.ts | 2 +- .../__tests__/createScientificStudy.e2e.ts | 82 ------- .../routes/media/__tests__/createMedia.e2e.ts | 1 - services/api/src/run.ts | 8 + services/api/test/AppTest.ts | 14 +- 
services/api/test/GetDockerContainer.ts | 221 +++++++++--------- services/api/test/testSetup.ts | 15 -- services/api/test/vitest.config.e2e.ts | 13 +- services/worker/src/bin/start-ctx.ts | 5 +- services/worker/src/context/context.ts | 2 +- services/worker/src/context/load.ts | 87 ++++++- services/worker/src/context/make.ts | 178 +++++++------- services/worker/src/io/worker.error.ts | 2 +- .../services/subscribers/WorkerSubscribers.ts | 2 +- .../event/createEventFromURL.subscriber.ts | 2 +- .../media/extractMediaExtra.subscriber.ts | 2 +- .../media/generateThumbnail.subscriber.ts | 2 +- ...transferFromExternalProvider.subscriber.ts | 2 +- .../nlp/extractEntitiesWithNLP.subscriber.ts | 2 +- .../PostToSocialPlatforms.subscriber.ts | 5 +- services/worker/test/WorkerTest.ts | 4 +- 83 files changed, 1166 insertions(+), 694 deletions(-) create mode 100644 packages/@liexp/backend/src/context/urlMetadata.context.ts create mode 100644 packages/@liexp/backend/src/flows/event/createEventFromURL.flow.spec.ts create mode 100644 packages/@liexp/backend/src/flows/event/extractFromURL.flow.spec.ts create mode 100644 packages/@liexp/backend/src/providers/redis/RedisPubSub.ts create mode 100644 packages/@liexp/backend/src/providers/redis/Subscriber.ts create mode 100644 packages/@liexp/backend/src/providers/redis/redis.error.ts rename packages/@liexp/backend/src/providers/space/{creates3.provider.ts => creates3ProviderConfig.ts} (68%) create mode 100644 packages/@liexp/backend/src/test/index.ts create mode 100644 packages/@liexp/backend/src/test/mocks/fs.mock.ts create mode 100644 packages/@liexp/backend/src/test/mocks/mock.utils.ts create mode 100644 packages/@liexp/backend/urls/0d1785bf42b588eaa482b4a024c964d3096dc6e0.txt create mode 100644 packages/@liexp/backend/vitest.config.ts.timestamp-1737064274359-67b01676a0689.mjs diff --git a/.github/workflows/packages-pull-request.yml b/.github/workflows/packages-pull-request.yml index a690714411..5f72b030dd 100644 --- a/.github/workflows/packages-pull-request.yml +++ b/.github/workflows/packages-pull-request.yml @@ -30,6 +30,7 @@ jobs: pnpm core lint pnpm shared lint pnpm test lint + pnpm backend lint pnpm ui lint - name: Run tests @@ -37,3 +38,4 @@ jobs: DEBUG: "-@liexp*" run: | pnpm shared test + pnpm backend test diff --git a/packages/@liexp/backend/src/context/api.context.ts b/packages/@liexp/backend/src/context/api.context.ts index f91b585dfc..bc4b3a9afc 100644 --- a/packages/@liexp/backend/src/context/api.context.ts +++ b/packages/@liexp/backend/src/context/api.context.ts @@ -1,5 +1,5 @@ import { type Endpoints } from "@liexp/shared/lib/endpoints/index.js"; -import { type EndpointsRESTClient } from "@liexp/shared/lib/providers/EndpointsRESTClient/types"; +import { type EndpointsRESTClient } from "@liexp/shared/lib/providers/EndpointsRESTClient/types.js"; export interface APIContext { api: EndpointsRESTClient; diff --git a/packages/@liexp/backend/src/context/index.ts b/packages/@liexp/backend/src/context/index.ts index 105aa6dca1..afdd6d2d8b 100644 --- a/packages/@liexp/backend/src/context/index.ts +++ b/packages/@liexp/backend/src/context/index.ts @@ -1,4 +1,3 @@ -import { type URLMetadataClient } from "../providers/URLMetadata.provider.js"; import { type FFMPEGProvider } from "../providers/ffmpeg/ffmpeg.provider.js"; import { type GeocodeProvider } from "../providers/geocode/geocode.provider.js"; import { type IGProvider } from "../providers/ig/ig.provider.js"; @@ -7,10 +6,6 @@ import { type NERProvider } from "../providers/ner/ner.provider.js"; import { 
type TGBotProvider } from "../providers/tg/tg.provider.js"; import { type WikipediaProvider } from "../providers/wikipedia/wikipedia.provider.js"; -export interface URLMetadataContext { - urlMetadata: URLMetadataClient; -} - export interface FFMPEGProviderContext { ffmpeg: FFMPEGProvider; } diff --git a/packages/@liexp/backend/src/context/urlMetadata.context.ts b/packages/@liexp/backend/src/context/urlMetadata.context.ts new file mode 100644 index 0000000000..8a0a537159 --- /dev/null +++ b/packages/@liexp/backend/src/context/urlMetadata.context.ts @@ -0,0 +1,5 @@ +import type { URLMetadataClient } from "../providers/URLMetadata.provider.js"; + +export interface URLMetadataContext { + urlMetadata: URLMetadataClient; +} diff --git a/packages/@liexp/backend/src/errors/index.ts b/packages/@liexp/backend/src/errors/index.ts index 058c4917a2..28c5aa9585 100644 --- a/packages/@liexp/backend/src/errors/index.ts +++ b/packages/@liexp/backend/src/errors/index.ts @@ -3,4 +3,4 @@ export * from "./NotAuthorizedError.js"; export * from "./NotFoundError.js"; export * from "./ServerError.js"; -export { IOError } from "ts-io-error/lib/index.js"; +export { IOError } from "ts-io-error"; diff --git a/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.spec.ts b/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.spec.ts new file mode 100644 index 0000000000..07fff8ffe1 --- /dev/null +++ b/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.spec.ts @@ -0,0 +1,132 @@ +import { fp, pipe } from "@liexp/core/lib/fp/index.js"; +import { uuid } from "@liexp/shared/lib/io/http/Common/UUID.js"; +import { SCIENTIFIC_STUDY } from "@liexp/shared/lib/io/http/Events/EventType.js"; +import { HumanReadableStringArb } from "@liexp/shared/lib/tests/arbitrary/HumanReadableString.arbitrary.js"; +import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; +import { sanitizeURL } from "@liexp/shared/lib/utils/url.utils.js"; +import { fc } from "@liexp/test"; +import { describe, expect, it } from "vitest"; +import { mockDeep } from "vitest-mock-extended"; +import { EventV2Entity } from "../../entities/Event.v2.entity.js"; +import { UserEntity } from "../../entities/User.entity.js"; +import { initContext } from "../../test/index.js"; +import { mockedContext, mockTERightOnce } from "../../test/mocks/mock.utils.js"; +import { + createEventFromURL, + type CreateEventFromURLContext, +} from "./createEventFromURL.flow.js"; + +describe(createEventFromURL.name, () => { + const appTest = { + ctx: mockedContext({ + puppeteer: mockDeep(), + logger: mockDeep(), + db: mockDeep(), + ner: mockDeep(), + fs: mockDeep(), + urlMetadata: mockDeep(), + config: initContext().config, + }), + }; + + it("should create an event from a URL", async () => { + const [url] = fc + .sample(fc.nat(), 1) + .map((id) => `https://www.sciencedirect.com/article/${id}` as any); + + const title = fc.sample(HumanReadableStringArb(), 1)[0]; + const description = fc.sample(HumanReadableStringArb(), 1)[0]; + + // no url in db + mockTERightOnce(appTest.ctx.db.execQuery, () => null); + // event by url + mockTERightOnce(appTest.ctx.puppeteer.execute, () => + fp.O.some({ + type: SCIENTIFIC_STUDY.value, + date: new Date(), + payload: { + title, + url, + description, + }, + }), + ); + + // link by url + let savedEvent: any; + mockTERightOnce(appTest.ctx.db.save, (_, event) => { + savedEvent = event[0]; + return event; + }); + + mockTERightOnce(appTest.ctx.db.findOneOrFail, () => savedEvent); + + // 
mocks.urlMetadata.fetchMetadata.mockResolvedValue({ + // title, + // description, + // url: scientificStudyData.url, + // keywords: [], + // }); + + // mockTERightOnce(appTest.ctx.puppeteer.getBrowser, () => null); + // mocks.puppeteer.page.goto.mockResolvedValueOnce(undefined); + + // // evaluate title + // mocks.puppeteer.page.$eval.mockResolvedValueOnce(title); + // // evaluate dropdown click + // mocks.puppeteer.page.click.mockResolvedValueOnce(undefined); + // // evaluate date string + // mocks.puppeteer.page.$eval.mockResolvedValueOnce([ + // "Received 27 July 2020", + // "Accepted 1 August 2020", + // ]); + // // wait for + // mocks.puppeteer.page.waitForSelector.mockResolvedValueOnce(undefined); + // mocks.puppeteer.page.$$.mockResolvedValueOnce([ + // { + // evaluate: vi.fn().mockResolvedValue(description), + // }, + // ]); + + // mocks.puppeteer.page.$eval.mockResolvedValueOnce("page content"); + + // mocks.ner.winkMethods.learnCustomEntities.mockResolvedValueOnce({} as any); + // mocks.ner.doc.out.mockReturnValue([]); + // mocks.ner.doc.sentences.mockReturnValue({ each: vi.fn() } as any); + // mocks.ner.doc.customEntities.mockReturnValue({ + // out: vi.fn().mockReturnValue([]), + // } as any); + // mocks.ner.doc.tokens.mockReturnValue({ each: vi.fn() } as any); + + // mocks.fs.existsSync.mockReturnValue(false); + // mocks.fs.readFileSync.mockReturnValue("[]"); + + const user = new UserEntity(); + + const event: any = await pipe( + createEventFromURL( + user, + uuid(), + url, + SCIENTIFIC_STUDY.value, + )(appTest.ctx), + throwTE, + ); + + expect(appTest.ctx.db.save).toHaveBeenCalledWith(EventV2Entity, [ + expect.objectContaining({ + type: SCIENTIFIC_STUDY.value, + payload: { + title, + description, + url: sanitizeURL(url), + }, + }), + ]); + + expect(event.type).toBe(SCIENTIFIC_STUDY.value); + expect(event.date).toBe(savedEvent.date); + + expect(event.payload).toMatchObject(savedEvent.payload); + }); +}); diff --git a/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.ts b/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.ts index e81d4091ad..a2e1ee03d8 100644 --- a/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.ts +++ b/packages/@liexp/backend/src/flows/event/createEventFromURL.flow.ts @@ -8,27 +8,25 @@ import { Equal } from "typeorm"; import { type ConfigContext } from "../../context/config.context.js"; import { type DatabaseContext } from "../../context/db.context.js"; import { type FSClientContext } from "../../context/fs.context.js"; -import { - type NERProviderContext, - type URLMetadataContext, -} from "../../context/index.js"; +import { type NERProviderContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; import { type PuppeteerProviderContext } from "../../context/puppeteer.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { EventV2Entity } from "../../entities/Event.v2.entity.js"; import { type UserEntity } from "../../entities/User.entity.js"; import { ServerError } from "../../errors/ServerError.js"; import { findByURL } from "../../queries/events/scientificStudy.query.js"; import { extractEventFromURL } from "./extractFromURL.flow.js"; -export const createEventFromURL = < - C extends LoggerContext & - ConfigContext & - FSClientContext & - NERProviderContext & - DatabaseContext & - URLMetadataContext & - PuppeteerProviderContext, ->( +export type CreateEventFromURLContext = LoggerContext & + ConfigContext & + 
FSClientContext & + NERProviderContext & + DatabaseContext & + URLMetadataContext & + PuppeteerProviderContext; + +export const createEventFromURL = ( user: UserEntity, eventId: UUID, url: URL, diff --git a/packages/@liexp/backend/src/flows/event/extractFromURL.flow.spec.ts b/packages/@liexp/backend/src/flows/event/extractFromURL.flow.spec.ts new file mode 100644 index 0000000000..f29744f692 --- /dev/null +++ b/packages/@liexp/backend/src/flows/event/extractFromURL.flow.spec.ts @@ -0,0 +1,135 @@ +import { fp, pipe } from "@liexp/core/lib/fp/index.js"; +import { uuid } from "@liexp/shared/lib/io/http/Common/UUID.js"; +import { SCIENTIFIC_STUDY } from "@liexp/shared/lib/io/http/Events/EventType.js"; +import { HumanReadableStringArb } from "@liexp/shared/lib/tests/arbitrary/HumanReadableString.arbitrary.js"; +import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; +import { sanitizeURL } from "@liexp/shared/lib/utils/url.utils.js"; +import { fc } from "@liexp/test"; +import { describe, expect, it, vi } from "vitest"; +import { mock } from "vitest-mock-extended"; +import { LinkEntity } from "../../entities/Link.entity.js"; +import { UserEntity } from "../../entities/User.entity.js"; +import { initContext, testConfig } from "../../test/index.js"; +import { mockedContext, mockTERightOnce } from "../../test/mocks/mock.utils.js"; +import { mocks } from "../../test/mocks.js"; +import { + createEventFromURL, + type CreateEventFromURLContext, +} from "./createEventFromURL.flow.js"; +import { extractEventFromURL } from "./extractFromURL.flow.js"; + +describe.skip(extractEventFromURL.name, () => { + const appTest = { + ctx: mockedContext({ + puppeteer: mock(), + logger: mock(), + db: mock(), + ner: mock(), + fs: mock(), + urlMetadata: mock(), + config: testConfig, + }), + }; + + it("should create an event from a URL", async () => { + const [url] = fc + .sample(fc.nat(), 1) + .map((id) => `https://www.sciencedirect.com/article/${id}` as any); + + const title = fc.sample(HumanReadableStringArb(), 1)[0]; + const description = fc.sample(HumanReadableStringArb(), 1)[0]; + + const scientificStudyData = { url }; + + // event by url + mockTERightOnce(appTest.ctx.db.execQuery, () => null); + // link by url + mockTERightOnce(appTest.ctx.db.findOne, () => null); + // save link + let savedEvent: any; + appTest.ctx.db.save.mockImplementation((_, link) => { + const l: any = link; + if (l[0].type === SCIENTIFIC_STUDY.value) { + savedEvent = l[0]; + } + return fp.TE.right(link); + }); + + mockTERightOnce(appTest.ctx.db.findOneOrFail, () => savedEvent); + + mocks.redis.publish.mockResolvedValue(1); + mocks.urlMetadata.fetchMetadata.mockResolvedValue({ + title, + description, + url: scientificStudyData.url, + keywords: [], + }); + + mockTERightOnce(appTest.ctx.puppeteer.getBrowser, () => null); + mocks.puppeteer.page.goto.mockResolvedValueOnce(undefined); + + // evaluate title + mocks.puppeteer.page.$eval.mockResolvedValueOnce(title); + // evaluate dropdown click + mocks.puppeteer.page.click.mockResolvedValueOnce(undefined); + // evaluate date string + mocks.puppeteer.page.$eval.mockResolvedValueOnce([ + "Received 27 July 2020", + "Accepted 1 August 2020", + ]); + // wait for + mocks.puppeteer.page.waitForSelector.mockResolvedValueOnce(undefined); + mocks.puppeteer.page.$$.mockResolvedValueOnce([ + { + evaluate: vi.fn().mockResolvedValue(description), + }, + ]); + + mocks.puppeteer.page.$eval.mockResolvedValueOnce("page content"); + + mocks.ner.winkMethods.learnCustomEntities.mockResolvedValueOnce({} as any); + 
mocks.ner.doc.out.mockReturnValue([]); + mocks.ner.doc.sentences.mockReturnValue({ each: vi.fn() } as any); + mocks.ner.doc.customEntities.mockReturnValue({ + out: vi.fn().mockReturnValue([]), + } as any); + mocks.ner.doc.tokens.mockReturnValue({ each: vi.fn() } as any); + + mocks.fs.existsSync.mockReturnValue(false); + mocks.fs.readFileSync.mockReturnValue("[]"); + + const user = new UserEntity(); + + const event: any = await pipe( + createEventFromURL( + user, + uuid(), + scientificStudyData.url, + SCIENTIFIC_STUDY.value, + )(appTest.ctx), + throwTE, + ); + + expect(mocks.ner).toHaveBeenCalledTimes(1); + expect(mocks.ner.winkMethods.learnCustomEntities).toHaveBeenCalledTimes(1); + expect(mocks.ner.winkMethods.readDoc).toHaveBeenCalledTimes(1); + expect(mocks.ner.doc.out).toHaveBeenCalledTimes(1); + + expect(mocks.db.connection.manager.save).toHaveBeenCalledWith( + LinkEntity, + + [ + expect.objectContaining({ + url: sanitizeURL(scientificStudyData.url), + }), + ], + undefined, + ); + + expect(event.type).toBe(SCIENTIFIC_STUDY.value); + expect(event.date).toBeDefined(); + + expect(event.payload.url).toBeInstanceOf(String); + expect(event.payload.title).toEqual(title); + }); +}); diff --git a/packages/@liexp/backend/src/flows/event/extractFromURL.flow.ts b/packages/@liexp/backend/src/flows/event/extractFromURL.flow.ts index 2bdeb445f4..64e81da776 100644 --- a/packages/@liexp/backend/src/flows/event/extractFromURL.flow.ts +++ b/packages/@liexp/backend/src/flows/event/extractFromURL.flow.ts @@ -3,8 +3,10 @@ import { getRelationIdsFromEventRelations } from "@liexp/shared/lib/helpers/even import { getSuggestions } from "@liexp/shared/lib/helpers/event-suggestion.js"; import { uuid } from "@liexp/shared/lib/io/http/Common/UUID.js"; import { type URL as URLT } from "@liexp/shared/lib/io/http/Common/index.js"; -import { type EventType } from "@liexp/shared/lib/io/http/Events/EventType.js"; -import { type ImageType } from "@liexp/shared/lib/io/http/Media/index.js"; +import { + SCIENTIFIC_STUDY, + type EventType, +} from "@liexp/shared/lib/io/http/Events/EventType.js"; import { toInitialValue } from "@liexp/shared/lib/providers/blocknote/utils.js"; import { parse } from "date-fns"; import { sequenceS } from "fp-ts/lib/Apply.js"; @@ -16,15 +18,14 @@ import type * as puppeteer from "puppeteer-core"; import { type ConfigContext } from "../../context/config.context.js"; import { type DatabaseContext } from "../../context/db.context.js"; import { type FSClientContext } from "../../context/fs.context.js"; -import { - type URLMetadataContext, - type NERProviderContext, -} from "../../context/index.js"; +import { type NERProviderContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type EventV2Entity } from "../../entities/Event.v2.entity.js"; import { type LinkEntity } from "../../entities/Link.entity.js"; import { type UserEntity } from "../../entities/User.entity.js"; import { ServerError } from "../../errors/ServerError.js"; +import { LinkIO } from "../../io/link.io.js"; import { extractRelationsFromURL } from "../admin/nlp/extractRelationsFromURL.flow.js"; import { fetchAndSave } from "../links/link.flow.js"; @@ -75,7 +76,7 @@ const extractPageMetadataFromProviderLink = ); return O.some({ - type: "ScientificStudy", + type: SCIENTIFIC_STUDY.value, title, date: date.toISOString(), description: contentText ?? 
"", @@ -113,7 +114,7 @@ const extractPageMetadataFromProviderLink = ); return O.some({ - type: "ScientificStudy", + type: SCIENTIFIC_STUDY.value, title, date: date.toISOString(), description: contentText ?? "", @@ -141,7 +142,7 @@ const extractPageMetadataFromProviderLink = ); return O.some({ - type: "ScientificStudy", + type: SCIENTIFIC_STUDY.value, title, date: date.toISOString(), description: contentText ?? "", @@ -176,7 +177,7 @@ const extractByProvider = return pipe( TE.Do, TE.bind("relations", () => - sequenceS(TE.ApplicativePar)({ + sequenceS(TE.ApplicativeSeq)({ relations: extractRelationsFromURL(p, l.url)(ctx), provider: extractPageMetadataFromProviderLink(p, host, l)(ctx), }), @@ -191,58 +192,46 @@ const extractByProvider = }) => { if (fp.O.isSome(provider)) { return pipe( - TE.tryCatch(() => { - return getSuggestions((v) => - Promise.resolve(toInitialValue(v)), - )( - provider.value, - O.some({ - id: l.id, - title: l.title, - description: l.description ?? l.title, - publishDate: l.publishDate ?? undefined, - provider: l.provider as any, - creator: l.creator?.id, - url: l.url, - image: l.image - ? { - ...l.image, - label: l.image.label ?? undefined, - description: l.image.description ?? undefined, - thumbnail: l.image.thumbnail ?? undefined, - type: l.image as any as ImageType, - extra: l.image.extra ?? undefined, - events: [], - links: [], - keywords: [], - areas: [], - } - : undefined, - keywords: [], - events: [], - actors: [], - groups: [], - media: [], - socialPosts: [], - createdAt: new Date(), - updatedAt: new Date(), - deletedAt: undefined, - }), - O.none, - getRelationIdsFromEventRelations({ - groupsMembers: [], - media: [], - areas: [], - actors: entities.actors as any[], - groups: entities.groups as any[], - keywords: entities.keywords as any[], - links: entities.links as any[], - }), + TE.Do, + TE.bind("link", () => { + return pipe( + LinkIO.decodeSingle(l), + fp.E.fold(() => fp.O.none, fp.O.some), + fp.TE.right, ); - }, ServerError.fromUnknown), - TE.map((suggestions) => - suggestions.find((s) => s.event.type === type), + }), + TE.bind("relations", () => + pipe( + fp.TE.right( + getRelationIdsFromEventRelations({ + groupsMembers: [], + media: [], + areas: [], + actors: entities.actors as any[], + groups: entities.groups as any[], + keywords: entities.keywords as any[], + links: entities.links as any[], + }), + ), + ), + ), + TE.chain(({ link, relations }) => + TE.tryCatch(() => { + const suggestionMaker = getSuggestions((v) => + Promise.resolve(toInitialValue(v)), + ); + + return suggestionMaker( + provider.value, + link, + O.none, + relations, + ); + }, ServerError.fromUnknown), ), + TE.map((suggestions) => { + return suggestions.find((s) => s.event.type === type); + }), TE.map(O.fromNullable), ); } diff --git a/packages/@liexp/backend/src/flows/links/link.flow.ts b/packages/@liexp/backend/src/flows/links/link.flow.ts index 7c593bee19..dcd5f9f05d 100644 --- a/packages/@liexp/backend/src/flows/links/link.flow.ts +++ b/packages/@liexp/backend/src/flows/links/link.flow.ts @@ -11,8 +11,8 @@ import { DateFromISOString } from "io-ts-types/lib/DateFromISOString.js"; import { type Metadata } from "page-metadata-parser"; import { Equal } from "typeorm"; import { type DatabaseContext } from "../../context/db.context.js"; -import { type URLMetadataContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type LinkEntity } from 
"../../entities/Link.entity.js"; import { type UserEntity } from "../../entities/User.entity.js"; import { ServerError } from "../../errors/ServerError.js"; diff --git a/packages/@liexp/backend/src/flows/media/thumbnails/extractMP4Thumbnail.flow.ts b/packages/@liexp/backend/src/flows/media/thumbnails/extractMP4Thumbnail.flow.ts index 1c865d712e..6a651eb4d5 100644 --- a/packages/@liexp/backend/src/flows/media/thumbnails/extractMP4Thumbnail.flow.ts +++ b/packages/@liexp/backend/src/flows/media/thumbnails/extractMP4Thumbnail.flow.ts @@ -1,4 +1,4 @@ -import * as fs from "node:fs/promises"; +import * as fs from "fs/promises"; import path from "path"; import { fp, pipe } from "@liexp/core/lib/fp/index.js"; import { diff --git a/packages/@liexp/backend/src/flows/tg/MessageParser/index.ts b/packages/@liexp/backend/src/flows/tg/MessageParser/index.ts index a0faf267d9..1e009afc7f 100644 --- a/packages/@liexp/backend/src/flows/tg/MessageParser/index.ts +++ b/packages/@liexp/backend/src/flows/tg/MessageParser/index.ts @@ -19,7 +19,6 @@ import { type ENVContext } from "../../../context/env.context.js"; import { type FSClientContext } from "../../../context/fs.context.js"; import { type HTTPProviderContext } from "../../../context/http.context.js"; import { - type URLMetadataContext, type TGBotProviderContext, type FFMPEGProviderContext, type ImgProcClientContext, @@ -29,6 +28,7 @@ import { type PDFProviderContext } from "../../../context/pdf.context.js"; import { type PuppeteerProviderContext } from "../../../context/puppeteer.context.js"; import { type QueuesProviderContext } from "../../../context/queue.context.js"; import { type SpaceContext } from "../../../context/space.context.js"; +import { type URLMetadataContext } from "../../../context/urlMetadata.context.js"; import { type UserEntity } from "../../../entities/User.entity.js"; import { type TGError } from "../../../providers/tg/tg.provider.js"; import { LoggerService } from "../../../services/logger/logger.service.js"; diff --git a/packages/@liexp/backend/src/flows/tg/__tests__/createFromTGMessage.flow.spec.ts b/packages/@liexp/backend/src/flows/tg/__tests__/createFromTGMessage.flow.spec.ts index f741896de0..158766fb74 100644 --- a/packages/@liexp/backend/src/flows/tg/__tests__/createFromTGMessage.flow.spec.ts +++ b/packages/@liexp/backend/src/flows/tg/__tests__/createFromTGMessage.flow.spec.ts @@ -1,12 +1,8 @@ -import * as fs from "fs"; import path from "path"; -import { GetLogger } from "@liexp/core"; import { fp, pipe } from "@liexp/core/lib/fp/index.js"; import { getPlatformEmbedURL } from "@liexp/shared/lib/helpers/media.helper.js"; import { uuid } from "@liexp/shared/lib/io/http/Common/UUID.js"; import { AdminCreate } from "@liexp/shared/lib/io/http/User.js"; -import { HTTPProvider } from "@liexp/shared/lib/providers/http/http.provider.js"; -import { PDFProvider } from "@liexp/shared/lib/providers/pdf/pdf.provider.js"; import { HumanReadableStringArb } from "@liexp/shared/lib/tests/arbitrary/HumanReadableString.arbitrary.js"; import { URLArb } from "@liexp/shared/lib/tests/arbitrary/URL.arbitrary.js"; import { UserArb } from "@liexp/shared/lib/tests/arbitrary/User.arbitrary.js"; @@ -15,15 +11,12 @@ import { sanitizeURL } from "@liexp/shared/lib/utils/url.utils.js"; import { fc } from "@liexp/test"; import debug from "debug"; import type TelegramBot from "node-telegram-bot-api"; -import { afterAll, beforeAll, describe, expect, test, vi } from "vitest"; -import { GetFSClient } from "../../../providers/fs/fs.provider.js"; -import { 
GetDatabaseClient } from "../../../providers/orm/database.provider.js"; -import { GetPuppeteerProvider } from "../../../providers/puppeteer.provider.js"; -import { EventsConfig } from "../../../queries/config/index.js"; +import { beforeAll, describe, expect, test, vi } from "vitest"; import { TGMessageArb, TGPhotoArb, } from "../../../test/arbitraries/TGMessage.arb.js"; +import { initContext } from "../../../test/index.js"; import puppeteerMocks from "../../../test/mocks/puppeteer.mock.js"; import { mocks } from "../../../test/mocks.js"; import { type UserTest } from "../../../test/user.utils.js"; @@ -43,25 +36,7 @@ interface MessageTest { describe("Create From TG Message", () => { let admin: UserTest; - const ctx = { - db: GetDatabaseClient({ - ...mocks.db, - logger: GetLogger("test"), - }), - tg: mocks.tg, - http: HTTPProvider(mocks.axios as any), - logger: GetLogger("test"), - imgProc: {} as any, - s3: mocks.s3 as any, - env: {} as any, - urlMetadata: mocks.urlMetadata, - queue: mocks.queueFS as any, - config: { events: EventsConfig } as any, - fs: GetFSClient(), - ffmpeg: {} as any, - pdf: PDFProvider({ client: mocks.pdf }), - puppeteer: GetPuppeteerProvider(mocks.puppeteer, {}, {} as any), - }; + const ctx = initContext(); beforeAll(() => { [admin] = fc.sample(UserArb, 1).map((u) => ({ @@ -69,17 +44,10 @@ describe("Create From TG Message", () => { password: "password", permissions: [AdminCreate.value], })); - if (!fs.existsSync(tempDir)) { - fs.mkdirSync(tempDir, { recursive: true }); - } // admin = await saveUser(ctx, [AdminCreate.value]); }); - afterAll(() => { - fs.rmSync(tempDir, { recursive: true }); - }); - describe.skip("createEventSuggestion", () => { test( "succeeds when link is not yet present in db", @@ -211,11 +179,10 @@ describe("Create From TG Message", () => { tempDir, `${message.message_id}.png`, ); - fs.writeFileSync(tempFileLocation, new Uint8Array(10)); // mock tg download mocks.tg.getFileStream.mockImplementationOnce(() => - fp.TE.right(fs.createReadStream(tempFileLocation)), + fp.TE.right(mocks.fs.createReadStream(tempFileLocation)), ); // mock puppeteer goto @@ -502,13 +469,11 @@ describe("Create From TG Message", () => { test.skip.each(messageCases)( "Running message case $n", async (c: MessageTest) => { - const message = pipe( - fs.readFileSync( - path.resolve(__dirname, `../../../../temp/tg/messages/${c.n}.json`), - "utf-8", - ), - JSON.parse, - ); + const message = { + message_id: c.n, + date: new Date().getTime(), + chat: {} as any, + }; puppeteerMocks.page.emulate.mockReset().mockResolvedValueOnce({}); const urls = c.urls(message); @@ -536,11 +501,9 @@ describe("Create From TG Message", () => { `${message.message_id}.png`, ); - fs.writeFileSync(tempFileLocation, new Uint8Array(10)); - // mock tg download mocks.tg.api.getFileStream.mockImplementationOnce(() => - fs.createReadStream(tempFileLocation), + mocks.fs.createReadStream(tempFileLocation), ); // mock s3 upload @@ -558,13 +521,13 @@ describe("Create From TG Message", () => { `${message.message_id}.png`, ); - fs.writeFileSync(tempFileLocation, new Uint8Array(10)); - // mock tg download mocks.tg.api.getFileStream - .mockImplementationOnce(() => fs.createReadStream(tempFileLocation)) .mockImplementationOnce(() => - fs.createReadStream(tempFileLocation), + mocks.fs.createReadStream(tempFileLocation), + ) + .mockImplementationOnce(() => + mocks.fs.createReadStream(tempFileLocation), ); // mock s3 upload diff --git a/packages/@liexp/backend/src/flows/tg/__tests__/upsertPinnedMessage.flow.spec.ts 
b/packages/@liexp/backend/src/flows/tg/__tests__/upsertPinnedMessage.flow.spec.ts index 44bfc7429c..d6f4bc8611 100644 --- a/packages/@liexp/backend/src/flows/tg/__tests__/upsertPinnedMessage.flow.spec.ts +++ b/packages/@liexp/backend/src/flows/tg/__tests__/upsertPinnedMessage.flow.spec.ts @@ -3,14 +3,21 @@ import { ActorArb, UncategorizedArb } from "@liexp/shared/lib/tests/index.js"; import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; import { fc } from "@liexp/test"; import * as E from "fp-ts/lib/Either.js"; -import { describe, test } from "vitest"; +import { describe, expect, test } from "vitest"; import { ActorEntity } from "../../../entities/Actor.entity.js"; +import { EventV2Entity } from "../../../entities/Event.v2.entity.js"; +import { KeywordEntity } from "../../../entities/Keyword.entity.js"; +import { initContext } from "../../../test/index.js"; +import { mocks } from "../../../test/mocks.js"; import { toPinnedMessage, upsertPinnedMessage, } from "../upsertPinnedMessage.flow.js"; describe("Upsert Pinned Message Flow", () => { + const Test = { + ctx: initContext(), + }; test.skip("Should upsert the message with 5 keywords", async () => { const keywordCount = 10; const actorCount = 10; @@ -43,7 +50,7 @@ describe("Upsert Pinned Message Flow", () => { ), ); - Test.mocks.tg.upsertPinnedMessage.mockImplementationOnce( + mocks.tg.upsertPinnedMessage.mockImplementationOnce( (text) => () => Promise.resolve(E.right({ message_id: 1, text })), ); diff --git a/packages/@liexp/backend/src/flows/tg/createFromTGMessage.flow.ts b/packages/@liexp/backend/src/flows/tg/createFromTGMessage.flow.ts index 678504a3f8..66a3f5a663 100644 --- a/packages/@liexp/backend/src/flows/tg/createFromTGMessage.flow.ts +++ b/packages/@liexp/backend/src/flows/tg/createFromTGMessage.flow.ts @@ -10,7 +10,6 @@ import { type ENVContext } from "../../context/env.context.js"; import { type FSClientContext } from "../../context/fs.context.js"; import { type HTTPProviderContext } from "../../context/http.context.js"; import { - type URLMetadataContext, type ImgProcClientContext, type TGBotProviderContext, type FFMPEGProviderContext, @@ -20,6 +19,7 @@ import { type PDFProviderContext } from "../../context/pdf.context.js"; import { type PuppeteerProviderContext } from "../../context/puppeteer.context.js"; import { type QueuesProviderContext } from "../../context/queue.context.js"; import { type SpaceContext } from "../../context/space.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type PuppeteerError, toPuppeteerError, diff --git a/packages/@liexp/backend/src/flows/tg/parseMessages.flow.ts b/packages/@liexp/backend/src/flows/tg/parseMessages.flow.ts index 4b7f080033..fbc312f318 100644 --- a/packages/@liexp/backend/src/flows/tg/parseMessages.flow.ts +++ b/packages/@liexp/backend/src/flows/tg/parseMessages.flow.ts @@ -7,7 +7,6 @@ import { type ENVContext } from "../../context/env.context.js"; import { type FSClientContext } from "../../context/fs.context.js"; import { type HTTPProviderContext } from "../../context/http.context.js"; import { - type URLMetadataContext, type ImgProcClientContext, type TGBotProviderContext, type FFMPEGProviderContext, @@ -17,6 +16,7 @@ import { type PDFProviderContext } from "../../context/pdf.context.js"; import { type PuppeteerProviderContext } from "../../context/puppeteer.context.js"; import { type QueuesProviderContext } from "../../context/queue.context.js"; import { type SpaceContext } from "../../context/space.context.js"; 
+import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { toTGError, type TGError } from "../../providers/tg/tg.provider.js"; import { LoggerService } from "../../services/logger/logger.service.js"; import { diff --git a/packages/@liexp/backend/src/flows/tg/parseURL.flow.ts b/packages/@liexp/backend/src/flows/tg/parseURL.flow.ts index 604e04d8e2..d3221ace94 100644 --- a/packages/@liexp/backend/src/flows/tg/parseURL.flow.ts +++ b/packages/@liexp/backend/src/flows/tg/parseURL.flow.ts @@ -16,11 +16,11 @@ import type * as puppeteer from "puppeteer-core"; import { Equal } from "typeorm"; import { type DatabaseContext } from "../../context/db.context.js"; import { type ENVContext } from "../../context/env.context.js"; -import { type URLMetadataContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; import { type PuppeteerProviderContext } from "../../context/puppeteer.context.js"; import { type QueuesProviderContext } from "../../context/queue.context.js"; import { type SpaceContext } from "../../context/space.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { LinkEntity } from "../../entities/Link.entity.js"; import { type UserEntity } from "../../entities/User.entity.js"; import { ServerError } from "../../errors/index.js"; diff --git a/packages/@liexp/backend/src/providers/URLMetadata.provider.ts b/packages/@liexp/backend/src/providers/URLMetadata.provider.ts index 08c047ef66..668931fde0 100644 --- a/packages/@liexp/backend/src/providers/URLMetadata.provider.ts +++ b/packages/@liexp/backend/src/providers/URLMetadata.provider.ts @@ -27,7 +27,7 @@ export interface URLMetadataClient { ) => TE.TaskEither; } -interface MakeURLMetadataContext { +export interface MakeURLMetadataContext { client: AxiosInstance; parser: { getMetadata: (dom: Document, url: string, opts?: any) => Metadata; diff --git a/packages/@liexp/backend/src/providers/ffmpeg/ffmpeg.provider.spec.ts b/packages/@liexp/backend/src/providers/ffmpeg/ffmpeg.provider.spec.ts index 696d558ce0..246b1dbf13 100644 --- a/packages/@liexp/backend/src/providers/ffmpeg/ffmpeg.provider.spec.ts +++ b/packages/@liexp/backend/src/providers/ffmpeg/ffmpeg.provider.spec.ts @@ -1,7 +1,7 @@ import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; import type ffmpeg from "fluent-ffmpeg"; import { pipe } from "fp-ts/lib/function"; -import { describe, test, beforeEach } from "vitest"; +import { describe, test, beforeEach, expect } from "vitest"; import { mock } from "vitest-mock-extended"; import { GetFFMPEGProvider } from "./ffmpeg.provider"; diff --git a/packages/@liexp/backend/src/providers/fs/fs.provider.ts b/packages/@liexp/backend/src/providers/fs/fs.provider.ts index 47f466dd8f..180dca1a92 100644 --- a/packages/@liexp/backend/src/providers/fs/fs.provider.ts +++ b/packages/@liexp/backend/src/providers/fs/fs.provider.ts @@ -1,4 +1,4 @@ -import fs from "fs"; +import type * as fs from "fs"; import path from "path"; import { fp } from "@liexp/core/lib/fp/index.js"; import * as logger from "@liexp/core/lib/logger/index.js"; @@ -29,6 +29,10 @@ export const toFSError = (e: unknown): FSError => { }); }; +export interface GetFSClientContext { + client: typeof fs; +} + export interface FSClient { _fs: typeof fs; resolve: (filePath: string) => string; @@ -41,21 +45,21 @@ export interface FSClient { ) => TE.TaskEither; } -export const GetFSClient = (): FSClient => { +export const GetFSClient = (ctx: GetFSClientContext): 
FSClient => { const objectExists: FSClient["objectExists"] = (filePath) => { return TE.fromIOEither( fp.IOE.tryCatch(() => { const filePathDir = path.dirname(filePath); - const tempFolderExists = fs.existsSync(filePathDir); + const tempFolderExists = ctx.client.existsSync(filePathDir); if (!tempFolderExists) { fsLogger.debug.log( "Folder %s does not exist, creating...", filePathDir, ); - fs.mkdirSync(filePathDir, { recursive: true }); + ctx.client.mkdirSync(filePathDir, { recursive: true }); } - const statsExists = fs.existsSync(filePath); + const statsExists = ctx.client.existsSync(filePath); // fsLogger.debug.log( // "Network file path %s exists? %s", // path.relative(process.cwd(), filePath), @@ -69,7 +73,10 @@ export const GetFSClient = (): FSClient => { const getObject: FSClient["getObject"] = (filePath) => { fsLogger.debug.log("Getting object from path %s", filePath); return pipe( - fp.IOE.tryCatch(() => fs.readFileSync(filePath, "utf-8"), toFSError), + fp.IOE.tryCatch( + () => ctx.client.readFileSync(filePath, "utf-8"), + toFSError, + ), fp.TE.fromIOEither, ); }; @@ -79,14 +86,14 @@ export const GetFSClient = (): FSClient => { return pipe( TE.fromIOEither( fp.IOE.tryCatch(() => { - fs.writeFileSync(filePath, data, "utf-8"); + ctx.client.writeFileSync(filePath, data, "utf-8"); }, toFSError), ), ); }; return { - _fs: fs, + _fs: ctx.client, resolve: (p) => path.resolve(process.cwd(), p), objectExists, writeObject, @@ -98,7 +105,7 @@ export const GetFSClient = (): FSClient => { TE.chain((exists) => { if (exists) { return TE.fromIO(() => { - fs.rmSync(filePath); + ctx.client.rmSync(filePath); }); } else { if (throwIfNoExists) { diff --git a/packages/@liexp/backend/src/providers/ner/ner.provider.ts b/packages/@liexp/backend/src/providers/ner/ner.provider.ts index 78357a2f4d..4c131de907 100644 --- a/packages/@liexp/backend/src/providers/ner/ner.provider.ts +++ b/packages/@liexp/backend/src/providers/ner/ner.provider.ts @@ -67,13 +67,14 @@ interface NERProviderContext { export const GetNERProvider = ({ entitiesFile, - logger, + logger: _logger, nlp: winkNLP, }: NERProviderContext): NERProvider => { + const logger = _logger.extend("ner"); return { entitiesFile, process: (text, patterns) => { - logger.debug.log("Looking for %O", patterns); + logger.debug.log("Looking for patterns %O", patterns); return pipe( fp.IOE.tryCatch((): NERResults => { const nlp = winkNLP(model); diff --git a/packages/@liexp/backend/src/providers/puppeteer.provider.ts b/packages/@liexp/backend/src/providers/puppeteer.provider.ts index 2ffec4a837..3f3a73e81c 100644 --- a/packages/@liexp/backend/src/providers/puppeteer.provider.ts +++ b/packages/@liexp/backend/src/providers/puppeteer.provider.ts @@ -5,8 +5,7 @@ import * as E from "fp-ts/lib/Either.js"; import * as TE from "fp-ts/lib/TaskEither.js"; import { pipe } from "fp-ts/lib/function.js"; import type * as puppeteer from "puppeteer-core"; -import { addExtra, type VanillaPuppeteer } from "puppeteer-extra"; -import puppeteerStealth from "puppeteer-extra-plugin-stealth"; +import { type VanillaPuppeteer } from "puppeteer-extra"; import { IOError } from "ts-io-error"; const puppeteerLogger = logger.GetLogger("puppeteer"); @@ -134,8 +133,8 @@ export type GetPuppeteerProvider = ( browser: puppeteer.Browser, ) => PuppeteerProvider; -export const GetPuppeteerProvider =
<P extends VanillaPuppeteer>
( - pup: P, +export const GetPuppeteerProvider = ( + pup: VanillaPuppeteer, defaultOpts: BrowserLaunchOpts, devices: typeof puppeteer.KnownDevices, ): PuppeteerProvider => { @@ -151,9 +150,6 @@ export const GetPuppeteerProvider =
<P extends VanillaPuppeteer>
( TE.fromEither, TE.chain((executablePath) => { return TE.tryCatch(async () => { - const p = addExtra(pup); - p.use(puppeteerStealth()); - const options = { executablePath, headless: true, @@ -165,7 +161,7 @@ export const GetPuppeteerProvider =
<P extends VanillaPuppeteer>
( puppeteerLogger.info.log("Launching browser with %O", options); - const b = await p.launch(options); + const b = await pup.launch(options); return b as puppeteer.Browser; }, toPuppeteerError); }), diff --git a/packages/@liexp/backend/src/providers/redis/RedisPubSub.ts b/packages/@liexp/backend/src/providers/redis/RedisPubSub.ts new file mode 100644 index 0000000000..41527277ad --- /dev/null +++ b/packages/@liexp/backend/src/providers/redis/RedisPubSub.ts @@ -0,0 +1,44 @@ +import { fp } from "@liexp/core/lib/fp/index.js"; +import type { ReaderTaskEither } from "fp-ts/lib/ReaderTaskEither.js"; +import * as TE from "fp-ts/lib/TaskEither.js"; +import { pipe } from "fp-ts/lib/function.js"; +import type { Decoder } from "io-ts"; +import type { LoggerContext } from "../../context/logger.context.js"; +import type { RedisContext } from "../../context/redis.context.js"; +import { type RedisError, toRedisError } from "./redis.error.js"; + +export interface RedisPubSub { + channel: K; + decoder: Decoder; + publish: ( + message: T, + ) => ReaderTaskEither; +} + +export const RedisPubSub = ( + channel: K, + decoder: Decoder, +): RedisPubSub => { + return { + channel, + decoder, + publish: + (message) => + ({ redis, logger }) => { + return pipe( + TE.tryCatch(async () => { + const count = await redis.publish(channel, JSON.stringify(message)); + logger.debug.log(`Published message to channel ${channel}`); + return count; + }, toRedisError), + fp.TE.filterOrElse( + (c) => c > 0, + () => + toRedisError( + new Error(`Failed to publish message on channel: ${channel}`), + ), + ), + ); + }, + }; +}; diff --git a/packages/@liexp/backend/src/providers/redis/Subscriber.ts b/packages/@liexp/backend/src/providers/redis/Subscriber.ts new file mode 100644 index 0000000000..b98a7a5b49 --- /dev/null +++ b/packages/@liexp/backend/src/providers/redis/Subscriber.ts @@ -0,0 +1,74 @@ +import { fp } from "@liexp/core/lib/fp/index.js"; +import type { ReaderTaskEither } from "fp-ts/lib/ReaderTaskEither.js"; +import * as TE from "fp-ts/lib/TaskEither.js"; +import { pipe } from "fp-ts/lib/function.js"; +import { failure } from "io-ts/lib/PathReporter.js"; +import type { LoggerContext } from "../../context/logger.context.js"; +import type { RedisContext } from "../../context/redis.context.js"; +import type { RedisPubSub } from "./RedisPubSub.js"; +import { RedisError, toRedisError } from "./redis.error.js"; + +export interface Subscriber + extends RedisPubSub { + subscribe: () => ReaderTaskEither; +} + +export const Subscriber = < + C extends LoggerContext & RedisContext, + E, + T, + K extends string, +>( + pubSub: RedisPubSub, + subscribe: (payload: T) => ReaderTaskEither, +): Subscriber => ({ + ...pubSub, + subscribe: () => (ctx) => { + return pipe( + TE.tryCatch(async () => { + await ctx.redis.subscribe(pubSub.channel); + ctx.logger.debug.log(`Subscribed to channel ${pubSub.channel}`); + }, toRedisError), + fp.RTE.fromTaskEither, + fp.RTE.map(() => { + ctx.redis.on("message", (boundChannel, message) => { + if (boundChannel !== pubSub.channel) { + return; + } + + ctx.logger.debug.log(`Received message on channel ${pubSub.channel}`); + void pipe( + pubSub.decoder.decode(JSON.parse(message)), + fp.E.mapLeft( + (err) => + new RedisError("Failed to decode message", { + kind: "DecodingError", + errors: failure(err), + }) as E, + ), + fp.RTE.fromEither, + fp.RTE.chainTaskEitherK((message) => subscribe(message)(ctx)), + fp.RTE.fold( + (e) => { + ctx.logger.error.log( + `Handling message for channel %s failed: %O`, + pubSub.channel, + 
e, + ); + return () => fp.T.of(undefined); + }, + () => { + ctx.logger.debug.log( + `Message handled successfully for channel %s`, + pubSub.channel, + ); + return () => fp.T.of(undefined); + }, + ), + )(ctx)(); + }); + }), + )(ctx); + }, + // task, +}); diff --git a/packages/@liexp/backend/src/providers/redis/redis.error.ts b/packages/@liexp/backend/src/providers/redis/redis.error.ts new file mode 100644 index 0000000000..9def84b204 --- /dev/null +++ b/packages/@liexp/backend/src/providers/redis/redis.error.ts @@ -0,0 +1,25 @@ +import { IOError } from "ts-io-error"; + +export class RedisError extends IOError { + name = "RedisError"; +} + +export const toRedisError = (e: unknown): RedisError => { + if (e instanceof IOError) { + return e as RedisError; + } + + if (e instanceof Error) { + return new RedisError(e.message, { + kind: "ServerError", + status: "500", + meta: [e.name, e.stack], + }); + } + + return new RedisError("An error occurred", { + kind: "ServerError", + status: "500", + meta: [String(e)], + }); +}; diff --git a/packages/@liexp/backend/src/providers/redis/redis.provider.ts b/packages/@liexp/backend/src/providers/redis/redis.provider.ts index b74792f632..c5a8f7031b 100644 --- a/packages/@liexp/backend/src/providers/redis/redis.provider.ts +++ b/packages/@liexp/backend/src/providers/redis/redis.provider.ts @@ -1,133 +1,22 @@ -import { fp, pipe } from "@liexp/core/lib/fp/index.js"; -import { type ReaderTaskEither } from "fp-ts/lib/ReaderTaskEither.js"; -import * as TE from "fp-ts/lib/TaskEither.js"; -import { type Decoder } from "io-ts"; -import { failure } from "io-ts/lib/PathReporter.js"; -import { IOError } from "ts-io-error"; -import { type LoggerContext } from "../../context/logger.context.js"; -import { type RedisContext } from "context/redis.context.js"; - -export class RedisError extends IOError { - name = "RedisError"; +import { fp } from "@liexp/core/lib/fp/index.js"; +import Redis from "ioredis"; +import { toRedisError } from "./redis.error.js"; + +interface RedisClientContext { + client: Redis; + host: string; + port: number; + lazyConnect?: boolean; } -const toRedisError = (e: unknown): RedisError => { - if (e instanceof IOError) { - return e as RedisError; - } - - if (e instanceof Error) { - return new RedisError(e.message, { - kind: "ServerError", - status: "500", - meta: [e.name, e.stack], +export const RedisClient = (ctx: RedisClientContext) => + fp.TE.tryCatch(async () => { + const redis = new Redis(6379, ctx.host, { + lazyConnect: ctx.lazyConnect, }); - } - - return new RedisError("An error occurred", { - kind: "ServerError", - status: "500", - meta: [String(e)], - }); -}; - -export interface RedisPubSub { - channel: K; - decoder: Decoder; - publish: ( - message: T, - ) => ReaderTaskEither; -} - -export const RedisPubSub = ( - channel: K, - decoder: Decoder, -): RedisPubSub => { - return { - channel, - decoder, - publish: - (message) => - ({ redis, logger }) => { - return pipe( - TE.tryCatch(async () => { - const count = await redis.publish(channel, JSON.stringify(message)); - logger.debug.log(`Published message to channel ${channel}`); - return count; - }, toRedisError), - fp.TE.filterOrElse( - (c) => c > 0, - () => - toRedisError( - new Error(`Failed to publish message on channel: ${channel}`), - ), - ), - ); - }, - }; -}; - -export interface Subscriber - extends RedisPubSub { - subscribe: () => ReaderTaskEither; -} - -export const Subscriber = < - C extends LoggerContext & RedisContext, - E, - T, - K extends string, ->( - pubSub: RedisPubSub, - subscribe: 
(payload: T) => ReaderTaskEither, -): Subscriber => ({ - ...pubSub, - subscribe: () => (ctx) => { - return pipe( - TE.tryCatch(async () => { - await ctx.redis.subscribe(pubSub.channel); - ctx.logger.debug.log(`Subscribed to channel ${pubSub.channel}`); - }, toRedisError), - fp.RTE.fromTaskEither, - fp.RTE.map(() => { - ctx.redis.on("message", (boundChannel, message) => { - if (boundChannel !== pubSub.channel) { - return; - } - ctx.logger.debug.log(`Received message on channel ${pubSub.channel}`); - void pipe( - pubSub.decoder.decode(JSON.parse(message)), - fp.E.mapLeft( - (err) => - new RedisError("Failed to decode message", { - kind: "DecodingError", - errors: failure(err), - }) as E, - ), - fp.RTE.fromEither, - fp.RTE.chainTaskEitherK((message) => subscribe(message)(ctx)), - fp.RTE.fold( - (e) => { - ctx.logger.error.log( - `Handling message for channel %s failed: %O`, - pubSub.channel, - e, - ); - return () => fp.T.of(undefined); - }, - () => { - ctx.logger.debug.log( - `Message handled successfully for channel %s`, - pubSub.channel, - ); - return () => fp.T.of(undefined); - }, - ), - )(ctx)(); - }); - }), - )(ctx); - }, - // task, -}); + if (ctx.lazyConnect) { + await redis.connect(); + } + return redis; + }, toRedisError); diff --git a/packages/@liexp/backend/src/providers/space/creates3.provider.ts b/packages/@liexp/backend/src/providers/space/creates3ProviderConfig.ts similarity index 68% rename from packages/@liexp/backend/src/providers/space/creates3.provider.ts rename to packages/@liexp/backend/src/providers/space/creates3ProviderConfig.ts index 62e4c33335..953c780047 100644 --- a/packages/@liexp/backend/src/providers/space/creates3.provider.ts +++ b/packages/@liexp/backend/src/providers/space/creates3ProviderConfig.ts @@ -1,7 +1,8 @@ -import { type S3ClientConfig } from "@aws-sdk/client-s3"; +import { S3Client, type S3ClientConfig } from "@aws-sdk/client-s3"; +import { Upload } from "@aws-sdk/lib-storage"; +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; import { type NODE_ENV } from "@liexp/core/lib/env/node-env.js"; -import { GetS3Provider } from "./s3.provider.js"; -import { type SpaceProvider } from "./space.provider.js"; +import { type MakeSpaceProviderConfig } from "./space.provider.js"; interface ENV { NODE_ENV: NODE_ENV; @@ -11,7 +12,9 @@ interface ENV { SPACE_ACCESS_KEY_SECRET: string; } -export const createS3Provider = (env: E): SpaceProvider => { +export const createS3ProviderConfig = ( + env: E, +): MakeSpaceProviderConfig => { const config: S3ClientConfig = env.NODE_ENV === "development" || env.NODE_ENV === "test" ? 
{ @@ -34,5 +37,9 @@ export const createS3Provider = (env: E): SpaceProvider => { tls: true, }; - return GetS3Provider(config); + return { + client: new S3Client(config), + getSignedUrl, + classes: { Upload }, + }; }; diff --git a/packages/@liexp/backend/src/providers/tg/tg.provider.ts b/packages/@liexp/backend/src/providers/tg/tg.provider.ts index 285cd2b059..65b96f35bb 100644 --- a/packages/@liexp/backend/src/providers/tg/tg.provider.ts +++ b/packages/@liexp/backend/src/providers/tg/tg.provider.ts @@ -3,7 +3,8 @@ import { pipe, fp } from "@liexp/core/lib/fp/index.js"; import { type Logger } from "@liexp/core/lib/logger/index.js"; import { MP4Type, PDFType } from "@liexp/shared/lib/io/http/Media/MediaType.js"; import * as TE from "fp-ts/lib/TaskEither.js"; -import TelegramBot from "node-telegram-bot-api"; +import type TelegramBot from "node-telegram-bot-api"; +import { type ConstructorOptions } from "node-telegram-bot-api"; import { IOError } from "../../errors/index.js"; export interface TGBotProvider { @@ -76,11 +77,12 @@ const liftTGTE = (p: () => Promise): TE.TaskEither => { }; export interface TGBotProviderCtx { + client: (token: string, options: ConstructorOptions) => TelegramBot; logger: Logger; } export const TGBotProvider = ( - { logger }: TGBotProviderCtx, + { client, logger }: TGBotProviderCtx, opts: TGBotProviderOpts, ): TGBotProvider => { const encryptedToken = opts.token @@ -97,7 +99,7 @@ export const TGBotProvider = ( ...opts, token: encryptedToken, }); - const api = new TelegramBot(opts.token, { + const api = client(opts.token, { polling: opts.polling, baseApiUrl: opts.baseApiUrl, }); diff --git a/packages/@liexp/backend/src/pubsub/buildImageWithSharp.pubSub.ts b/packages/@liexp/backend/src/pubsub/buildImageWithSharp.pubSub.ts index 5d0f257c8d..d80d431e9e 100644 --- a/packages/@liexp/backend/src/pubsub/buildImageWithSharp.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/buildImageWithSharp.pubSub.ts @@ -1,8 +1,12 @@ +import { BuildImageLayer } from "@liexp/shared/lib/io/http/admin/BuildImage.js"; import * as t from "io-ts"; import { UUID } from "io-ts-types"; -import { RedisPubSub } from "../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../providers/redis/RedisPubSub.js"; export const BuildImageWithSharpPubSub = RedisPubSub( "image:build-with-sharp", - t.strict({ id: UUID }), + t.strict({ + image: t.union([UUID, t.null]), + layers: t.array(BuildImageLayer), + }), ); diff --git a/packages/@liexp/backend/src/pubsub/events/createEventFromURL.pubSub.ts b/packages/@liexp/backend/src/pubsub/events/createEventFromURL.pubSub.ts index a4020e84ee..dfe1dc8b65 100644 --- a/packages/@liexp/backend/src/pubsub/events/createEventFromURL.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/events/createEventFromURL.pubSub.ts @@ -2,7 +2,7 @@ import { URL } from "@liexp/shared/lib/io/http/Common/URL.js"; import { UUID } from "@liexp/shared/lib/io/http/Common/UUID.js"; import { EventType } from "@liexp/shared/lib/io/http/Events/EventType.js"; import * as t from "io-ts"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const CreateEventFromURLPubSub = RedisPubSub( "event:create-from-url", diff --git a/packages/@liexp/backend/src/pubsub/jobs/processJobDone.pubSub.ts b/packages/@liexp/backend/src/pubsub/jobs/processJobDone.pubSub.ts index e4164a2d59..da3b488dc4 100644 --- a/packages/@liexp/backend/src/pubsub/jobs/processJobDone.pubSub.ts +++ 
b/packages/@liexp/backend/src/pubsub/jobs/processJobDone.pubSub.ts @@ -1,5 +1,5 @@ import * as t from "io-ts"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const ProcessJobDonePubSub = RedisPubSub( "job:process-done", diff --git a/packages/@liexp/backend/src/pubsub/media/createThumbnail.pubSub.ts b/packages/@liexp/backend/src/pubsub/media/createThumbnail.pubSub.ts index 5bf3456e83..36eb4af4df 100644 --- a/packages/@liexp/backend/src/pubsub/media/createThumbnail.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/media/createThumbnail.pubSub.ts @@ -1,6 +1,6 @@ import { Media } from "@liexp/shared/lib/io/http/Media/index.js"; import * as t from "io-ts"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; const { id, location, thumbnail, type } = Media.type.props; diff --git a/packages/@liexp/backend/src/pubsub/media/extractMediaExtra.pubSub.ts b/packages/@liexp/backend/src/pubsub/media/extractMediaExtra.pubSub.ts index a75222cbcb..efde290522 100644 --- a/packages/@liexp/backend/src/pubsub/media/extractMediaExtra.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/media/extractMediaExtra.pubSub.ts @@ -1,6 +1,6 @@ import * as t from "io-ts"; import { UUID } from "io-ts-types"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const ExtractMediaExtraPubSub = RedisPubSub( "media:extract-media-extra", diff --git a/packages/@liexp/backend/src/pubsub/media/generateThumbnail.pubSub.ts b/packages/@liexp/backend/src/pubsub/media/generateThumbnail.pubSub.ts index 12fd53a18f..ae80858ed2 100644 --- a/packages/@liexp/backend/src/pubsub/media/generateThumbnail.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/media/generateThumbnail.pubSub.ts @@ -1,6 +1,6 @@ import * as t from "io-ts"; import { UUID } from "io-ts-types"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const GenerateThumbnailPubSub = RedisPubSub( "media:generate-thumbnail", diff --git a/packages/@liexp/backend/src/pubsub/media/transferFromExternalProvider.pubSub.ts b/packages/@liexp/backend/src/pubsub/media/transferFromExternalProvider.pubSub.ts index 7d3a54424d..5d02be4dfe 100644 --- a/packages/@liexp/backend/src/pubsub/media/transferFromExternalProvider.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/media/transferFromExternalProvider.pubSub.ts @@ -1,7 +1,7 @@ import { MediaType } from "@liexp/shared/lib/io/http/Media/MediaType.js"; import * as t from "io-ts"; import { UUID } from "io-ts-types"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const TransferMediaFromExternalProviderPubSub = RedisPubSub( "media:transfer-from-external-provider", diff --git a/packages/@liexp/backend/src/pubsub/nlp/extractEntitiesWithNLP.pubSub.ts b/packages/@liexp/backend/src/pubsub/nlp/extractEntitiesWithNLP.pubSub.ts index 49ae1f6a79..d8f0cb8dab 100644 --- a/packages/@liexp/backend/src/pubsub/nlp/extractEntitiesWithNLP.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/nlp/extractEntitiesWithNLP.pubSub.ts @@ -1,5 +1,5 @@ import { ExtractEntitiesWithNLPInput } from "@liexp/shared/lib/io/http/admin/ExtractNLPEntities.js"; -import { RedisPubSub } from 
"../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const ExtractEntitiesWithNLP = RedisPubSub( "nlp:extract-entities", diff --git a/packages/@liexp/backend/src/pubsub/postToSocialPlatforms.pubSub.ts b/packages/@liexp/backend/src/pubsub/postToSocialPlatforms.pubSub.ts index 1083f9bb25..f81a0a743d 100644 --- a/packages/@liexp/backend/src/pubsub/postToSocialPlatforms.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/postToSocialPlatforms.pubSub.ts @@ -1,7 +1,7 @@ import { CreateSocialPost } from "@liexp/shared/lib/io/http/SocialPost.js"; import * as t from "io-ts"; import { UUID } from "io-ts-types"; -import { RedisPubSub } from "../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../providers/redis/RedisPubSub.js"; export const PostToSocialPlatformsPubSub = RedisPubSub( "post-social-post", diff --git a/packages/@liexp/backend/src/pubsub/searchFromWikipedia.pubSub.ts b/packages/@liexp/backend/src/pubsub/searchFromWikipedia.pubSub.ts index fa0066fb10..f8626810b9 100644 --- a/packages/@liexp/backend/src/pubsub/searchFromWikipedia.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/searchFromWikipedia.pubSub.ts @@ -1,6 +1,6 @@ import { ACTOR, GROUP } from "@liexp/shared/lib/io/http/Common/index.js"; import * as t from "io-ts"; -import { RedisPubSub } from "../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../providers/redis/RedisPubSub.js"; export const SearchFromWikipediaPubSub = RedisPubSub( "search:search-from-wikipedia", diff --git a/packages/@liexp/backend/src/pubsub/stats/createEntityStats.pubSub.ts b/packages/@liexp/backend/src/pubsub/stats/createEntityStats.pubSub.ts index 321b8629b6..36ee88c393 100644 --- a/packages/@liexp/backend/src/pubsub/stats/createEntityStats.pubSub.ts +++ b/packages/@liexp/backend/src/pubsub/stats/createEntityStats.pubSub.ts @@ -1,5 +1,5 @@ import * as t from "io-ts"; -import { RedisPubSub } from "../../providers/redis/redis.provider.js"; +import { RedisPubSub } from "../../providers/redis/RedisPubSub.js"; export const CreateEntityStatsPubSub = RedisPubSub( "stats:create-entity", diff --git a/packages/@liexp/backend/src/queries/events/createEvent.query.ts b/packages/@liexp/backend/src/queries/events/createEvent.query.ts index 2e55a3adc9..03cd471158 100644 --- a/packages/@liexp/backend/src/queries/events/createEvent.query.ts +++ b/packages/@liexp/backend/src/queries/events/createEvent.query.ts @@ -5,8 +5,8 @@ import { type ReaderTaskEither } from "fp-ts/lib/ReaderTaskEither.js"; import * as TE from "fp-ts/lib/TaskEither.js"; import { type DeepPartial } from "typeorm"; import { type DatabaseContext } from "../../context/db.context.js"; -import { type URLMetadataContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type EventV2Entity } from "../../entities/Event.v2.entity.js"; import { type DBError } from "../../providers/orm/database.provider.js"; import { fetchRelationIds } from "./fetchEventRelations.query.js"; diff --git a/packages/@liexp/backend/src/queries/events/editEvent.query.ts b/packages/@liexp/backend/src/queries/events/editEvent.query.ts index 476115170b..132f0fa497 100644 --- a/packages/@liexp/backend/src/queries/events/editEvent.query.ts +++ b/packages/@liexp/backend/src/queries/events/editEvent.query.ts @@ -4,8 +4,8 @@ import * as O from "fp-ts/lib/Option.js"; import * as TE from 
"fp-ts/lib/TaskEither.js"; import { type DeepPartial } from "typeorm"; import { type DatabaseContext } from "../../context/db.context.js"; -import { type URLMetadataContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type EventV2Entity } from "../../entities/Event.v2.entity.js"; import { type DBError } from "../../providers/orm/database.provider.js"; import { optionalsToUndefined } from "../../utils/foldOptionals.utils.js"; diff --git a/packages/@liexp/backend/src/queries/events/fetchEventRelations.query.ts b/packages/@liexp/backend/src/queries/events/fetchEventRelations.query.ts index 2bfa92f3db..2f2be95d9d 100644 --- a/packages/@liexp/backend/src/queries/events/fetchEventRelations.query.ts +++ b/packages/@liexp/backend/src/queries/events/fetchEventRelations.query.ts @@ -12,8 +12,8 @@ import { type Int } from "io-ts"; import { type DeepPartial } from "typeorm"; import { type DatabaseContext } from "../../context/db.context.js"; import { type ENVContext } from "../../context/env.context.js"; -import { type URLMetadataContext } from "../../context/index.js"; import { type LoggerContext } from "../../context/logger.context.js"; +import { type URLMetadataContext } from "../../context/urlMetadata.context.js"; import { type ActorEntity } from "../../entities/Actor.entity.js"; import { type GroupEntity } from "../../entities/Group.entity.js"; import { type KeywordEntity } from "../../entities/Keyword.entity.js"; diff --git a/packages/@liexp/backend/src/test/index.ts b/packages/@liexp/backend/src/test/index.ts new file mode 100644 index 0000000000..74f055a372 --- /dev/null +++ b/packages/@liexp/backend/src/test/index.ts @@ -0,0 +1,121 @@ +import { GetLogger } from "@liexp/core/lib/index.js"; +import { HTTPProvider } from "@liexp/shared/lib/providers/http/http.provider.js"; +import { PDFProvider } from "@liexp/shared/lib/providers/pdf/pdf.provider.js"; +import D from "debug"; +import * as puppeteer from "puppeteer-core"; +import { type ConfigContext } from "../context/config.context.js"; +import { type DatabaseContext } from "../context/db.context.js"; +import { type ENVContext } from "../context/env.context.js"; +import { type FSClientContext } from "../context/fs.context.js"; +import { type HTTPProviderContext } from "../context/http.context.js"; +import { + type FFMPEGProviderContext, + type ImgProcClientContext, + type NERProviderContext, + type TGBotProviderContext, +} from "../context/index.js"; +import { type LoggerContext } from "../context/logger.context.js"; +import { type PDFProviderContext } from "../context/pdf.context.js"; +import { type PuppeteerProviderContext } from "../context/puppeteer.context.js"; +import { type QueuesProviderContext } from "../context/queue.context.js"; +import { type SpaceContext } from "../context/space.context.js"; +import { type URLMetadataContext } from "../context/urlMetadata.context.js"; +import { type BACKEND_ENV } from "../io/ENV.js"; +import { MakeURLMetadata } from "../providers/URLMetadata.provider.js"; +import { GetFFMPEGProvider } from "../providers/ffmpeg/ffmpeg.provider.js"; +import { GetFSClient } from "../providers/fs/fs.provider.js"; +import { MakeImgProcClient } from "../providers/imgproc/imgproc.provider.js"; +import { GetNERProvider } from "../providers/ner/ner.provider.js"; +import { GetDatabaseClient } from "../providers/orm/index.js"; +import { GetPuppeteerProvider } from 
"../providers/puppeteer.provider.js"; +import { GetQueueProvider } from "../providers/queue.provider.js"; +import { MakeSpaceProvider } from "../providers/space/space.provider.js"; +import { TGBotProvider } from "../providers/tg/tg.provider.js"; +import { EventsConfig } from "../queries/config/index.js"; +import { mocks } from "./mocks.js"; + +const pdfContext = PDFProvider({ client: mocks.pdf }); + +type TestContext = ENVContext & + PDFProviderContext & + TGBotProviderContext & + LoggerContext & + DatabaseContext & + PuppeteerProviderContext & + ConfigContext & + FSClientContext & + NERProviderContext & + URLMetadataContext & + HTTPProviderContext & + ImgProcClientContext & + SpaceContext & + QueuesProviderContext & + FFMPEGProviderContext; + +export const testConfig = { + dirs: { + cwd: "", + config: { nlp: "" }, + temp: { root: "", queue: "", stats: "", nlp: "", media: "" }, + }, + media: { + thumbnailHeight: 0, + thumbnailWidth: 0, + }, + events: EventsConfig, +}; + +export const initContext = (): TestContext => { + D.enable(process.env.DEBUG ?? "*"); + + const logger = GetLogger("test"); + + const fs = GetFSClient({ client: mocks.fs }); + + const ctx = { + env: process.env as any as BACKEND_ENV, + db: GetDatabaseClient({ + connection: mocks.db.connection, + logger, + }), + fs, + s3: MakeSpaceProvider({ + client: mocks.s3.client as any, + getSignedUrl: mocks.s3.getSignedUrl, + classes: mocks.s3.classes as any, + }), + config: testConfig, + pdf: pdfContext, + puppeteer: GetPuppeteerProvider( + mocks.puppeteer, + {}, + puppeteer.KnownDevices, + ), + ffmpeg: GetFFMPEGProvider(mocks.ffmpeg), + queue: GetQueueProvider(fs, "fake-queue"), + http: HTTPProvider(mocks.axios as any), + ner: GetNERProvider({ + nlp: mocks.ner, + entitiesFile: "fake", + logger, + }), + urlMetadata: MakeURLMetadata({ + client: mocks.urlMetadata.fetchHTML as any, + parser: { + getMetadata: mocks.urlMetadata.fetchMetadata, + }, + }), + tg: TGBotProvider( + { logger: logger, client: () => mocks.tg as any }, + { token: "fake", chat: "fake", polling: false, baseApiUrl: "fake" }, + ), + imgProc: MakeImgProcClient({ + logger: logger.extend("imgproc"), + client: mocks.sharp, + exifR: mocks.exifR, + }), + logger: GetLogger("test"), + }; + + return ctx; +}; diff --git a/packages/@liexp/backend/src/test/mocks.ts b/packages/@liexp/backend/src/test/mocks.ts index 5e8e199a6c..1d2e1a07f6 100644 --- a/packages/@liexp/backend/src/test/mocks.ts +++ b/packages/@liexp/backend/src/test/mocks.ts @@ -1,7 +1,10 @@ +import { type AxiosInstance } from "axios"; import { type Mock, vi } from "vitest"; +import { mock, type MockProxy } from "vitest-mock-extended"; import { dbMock } from "./mocks/db.mock.js"; import { exifRMock } from "./mocks/exifreader.mock.js"; import ffmpegMock from "./mocks/ffmpeg.mock.js"; +import { fsMock } from "./mocks/fs.mock.js"; import { igProviderMock } from "./mocks/ig.mock.js"; import NLPMock from "./mocks/nlp.mock.js"; import { pdfJsMock } from "./mocks/pdfjs.mock.js"; @@ -13,8 +16,9 @@ import sharpMock from "./mocks/sharp.mock.js"; import { tgProviderMock } from "./mocks/tg.mock.js"; import { wikipediaProviderMock } from "./mocks/wikipedia.mock.js"; -export interface AppMocks { +export interface DepsMocks { axios: typeof axiosMock; + fs: typeof fsMock; ffmpeg: typeof ffmpegMock; ner: NLPMock; db: typeof dbMock; @@ -37,16 +41,30 @@ export interface AppMocks { const fetchHTML = vi.fn(); const fetchMetadata = vi.fn(); -export const axiosMock = { - get: vi.fn(), - interceptors: { - request: { - use: vi.fn(), - }, - }, 
-}; +export const axiosMock: MockProxy = mock( + { + get: vi.fn(), + interceptors: mock( + { + request: { + use: vi.fn(), + eject: vi.fn(), + clear: vi.fn(), + }, + response: { + use: vi.fn(), + eject: vi.fn(), + clear: vi.fn(), + }, + }, + { deep: true }, + ), + } as any, + { deep: true }, +); -export const mocks: AppMocks = { +export const mocks: DepsMocks = { + fs: fsMock, wiki: wikipediaProviderMock, axios: axiosMock, ffmpeg: ffmpegMock, diff --git a/packages/@liexp/backend/src/test/mocks/db.mock.ts b/packages/@liexp/backend/src/test/mocks/db.mock.ts index c423b4de60..8497a90291 100644 --- a/packages/@liexp/backend/src/test/mocks/db.mock.ts +++ b/packages/@liexp/backend/src/test/mocks/db.mock.ts @@ -10,6 +10,8 @@ import { mock, mockDeep } from "vitest-mock-extended"; const queryBuilder = mockDeep>({ where: vi.fn().mockReturnThis(), orWhere: vi.fn().mockReturnThis(), + loadAllRelationIds: vi.fn().mockReturnThis(), + getOne: vi.fn().mockRejectedValue(new Error("getOne not implemented")), getOneOrFail: vi .fn() .mockRejectedValue(new Error("getOneOrFail not implemented")), diff --git a/packages/@liexp/backend/src/test/mocks/fs.mock.ts b/packages/@liexp/backend/src/test/mocks/fs.mock.ts new file mode 100644 index 0000000000..3a958b27b4 --- /dev/null +++ b/packages/@liexp/backend/src/test/mocks/fs.mock.ts @@ -0,0 +1,9 @@ +import type fs from "fs"; +import { vi } from "vitest"; +import { type DeepMockProxy, mock } from "vitest-mock-extended"; + +export const fsMock: DeepMockProxy = mock({ + existsSync: vi.fn().mockImplementation(() => { + throw new Error("fs.existsSync is not implemented"); + }), +}); diff --git a/packages/@liexp/backend/src/test/mocks/mock.utils.ts b/packages/@liexp/backend/src/test/mocks/mock.utils.ts new file mode 100644 index 0000000000..d8ad6d8d3a --- /dev/null +++ b/packages/@liexp/backend/src/test/mocks/mock.utils.ts @@ -0,0 +1,20 @@ +import { fp } from "@liexp/core/lib/fp/index.js"; +import { type TaskEither } from "fp-ts/lib/TaskEither"; +import { type MockInstance } from "vitest"; +import { type DeepMockProxy } from "vitest-mock-extended"; + +export const mockTERightOnce = ( + fn: MockInstance<(...args: any[]) => TaskEither>, + value: (...args: any[]) => U, +) => + fn.mockImplementationOnce((...args) => { + return fp.TE.right(value(...args)); + }); + +type MockedContext> = { + [K in keyof C]: DeepMockProxy; +}; + +export const mockedContext = >( + ctx: C, +): MockedContext => ctx; diff --git a/packages/@liexp/backend/src/test/mocks/sharp.mock.ts b/packages/@liexp/backend/src/test/mocks/sharp.mock.ts index ded7432e57..cacf151a67 100644 --- a/packages/@liexp/backend/src/test/mocks/sharp.mock.ts +++ b/packages/@liexp/backend/src/test/mocks/sharp.mock.ts @@ -1,7 +1,8 @@ +import type sharp from "sharp"; import { vi } from "vitest"; -import { mock } from "vitest-mock-extended"; +import { type DeepMockProxy, mock } from "vitest-mock-extended"; -export const sharpMock = mock({ +export const sharpMock: DeepMockProxy = mock({ keepExif: vi.fn().mockReturnThis(), rotate: vi.fn().mockReturnThis(), resize: vi.fn().mockReturnThis(), @@ -9,4 +10,4 @@ export const sharpMock = mock({ toBuffer: vi.fn().mockResolvedValueOnce(Buffer.from([])), }); -export default vi.fn(() => sharpMock); +export default sharpMock as any as typeof sharp; diff --git a/packages/@liexp/backend/urls/0d1785bf42b588eaa482b4a024c964d3096dc6e0.txt b/packages/@liexp/backend/urls/0d1785bf42b588eaa482b4a024c964d3096dc6e0.txt new file mode 100644 index 0000000000..acdfa86028 --- /dev/null +++ 
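The mockTERightOnce helper added in mock.utils.ts wraps vitest's mockImplementationOnce so that a TaskEither-returning dependency resolves once to a Right built from the call arguments. A short usage sketch; the findOne function is hypothetical and the import path assumes the usual src-to-lib mapping.

import * as E from "fp-ts/lib/Either.js";
import * as TE from "fp-ts/lib/TaskEither.js";
import { expect, it, vi } from "vitest";
import { mockTERightOnce } from "@liexp/backend/lib/test/mocks/mock.utils.js";

it("resolves to a Right built from the call arguments", async () => {
  // A hypothetical TaskEither-returning dependency, used only for illustration.
  const findOne = vi.fn(
    (id: string): TE.TaskEither<Error, { id: string }> =>
      TE.left(new Error("not implemented")),
  );

  mockTERightOnce(findOne, (id: string) => ({ id }));

  const result = await findOne("abc")();
  expect(result).toEqual(E.right({ id: "abc" }));
});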
b/packages/@liexp/backend/urls/0d1785bf42b588eaa482b4a024c964d3096dc6e0.txt @@ -0,0 +1 @@ +"page content" \ No newline at end of file diff --git a/packages/@liexp/backend/vitest.config.ts.timestamp-1737064274359-67b01676a0689.mjs b/packages/@liexp/backend/vitest.config.ts.timestamp-1737064274359-67b01676a0689.mjs new file mode 100644 index 0000000000..105a54e69c --- /dev/null +++ b/packages/@liexp/backend/vitest.config.ts.timestamp-1737064274359-67b01676a0689.mjs @@ -0,0 +1,57 @@ +// vitest.config.ts +import viteTsconfigPaths2 from "file:///home/ascariandrea/Workspace/lies-exposed/node_modules/vite-tsconfig-paths/dist/index.js"; +import { defineConfig as defineConfig2, mergeConfig } from "file:///home/ascariandrea/Workspace/lies-exposed/node_modules/vitest/dist/config.js"; + +// src/test/vitest.base-config.ts +import { URL } from "url"; +import viteTsconfigPaths from "file:///home/ascariandrea/Workspace/lies-exposed/node_modules/vite-tsconfig-paths/dist/index.js"; +import { defineConfig } from "file:///home/ascariandrea/Workspace/lies-exposed/node_modules/vitest/dist/config.js"; +var __vite_injected_original_import_meta_url = "file:///home/ascariandrea/Workspace/lies-exposed/packages/@liexp/backend/src/test/vitest.base-config.ts"; +var PathnameAlias = (url) => (mockPath) => { + return new URL(mockPath, url).pathname; +}; +var toAlias = PathnameAlias(__vite_injected_original_import_meta_url); +var baseConfig = defineConfig({ + root: toAlias("./"), + test: { + environment: "node", + watch: false, + alias: { + sharp: toAlias("mocks/sharp.mock.js"), + canvas: toAlias("mocks/canvas.mock.js"), + "pdfjs-dist/legacy/build/pdf.js": toAlias("mocks/pdfjs.mock.js"), + "@blocknote/core": toAlias("mocks/blocknote-core.mock.js"), + "@blocknote/react/**": toAlias("mocks/blocknote-react.mock.js") + } + }, + plugins: [ + viteTsconfigPaths({ + root: toAlias("./") + }) + ] +}); + +// vitest.config.ts +var __vite_injected_original_dirname = "/home/ascariandrea/Workspace/lies-exposed/packages/@liexp/backend"; +var vitest_config_default = mergeConfig(baseConfig, defineConfig2({ + test: { + name: "@liexp/backend", + root: __vite_injected_original_dirname, + globals: true, + include: [__vite_injected_original_dirname + "/src/**/*.spec.ts"], + watch: false, + coverage: { + thresholds: { + lines: 90, + statements: 80, + functions: 80 + } + } + }, + plugins: [viteTsconfigPaths2({ root: __vite_injected_original_dirname })], + root: __vite_injected_original_dirname +})); +export { + vitest_config_default as default +}; +//# 
sourceMappingURL=data:application/json;base64,ewogICJ2ZXJzaW9uIjogMywKICAic291cmNlcyI6IFsidml0ZXN0LmNvbmZpZy50cyIsICJzcmMvdGVzdC92aXRlc3QuYmFzZS1jb25maWcudHMiXSwKICAic291cmNlc0NvbnRlbnQiOiBbImNvbnN0IF9fdml0ZV9pbmplY3RlZF9vcmlnaW5hbF9kaXJuYW1lID0gXCIvaG9tZS9hc2NhcmlhbmRyZWEvV29ya3NwYWNlL2xpZXMtZXhwb3NlZC9wYWNrYWdlcy9AbGlleHAvYmFja2VuZFwiO2NvbnN0IF9fdml0ZV9pbmplY3RlZF9vcmlnaW5hbF9maWxlbmFtZSA9IFwiL2hvbWUvYXNjYXJpYW5kcmVhL1dvcmtzcGFjZS9saWVzLWV4cG9zZWQvcGFja2FnZXMvQGxpZXhwL2JhY2tlbmQvdml0ZXN0LmNvbmZpZy50c1wiO2NvbnN0IF9fdml0ZV9pbmplY3RlZF9vcmlnaW5hbF9pbXBvcnRfbWV0YV91cmwgPSBcImZpbGU6Ly8vaG9tZS9hc2NhcmlhbmRyZWEvV29ya3NwYWNlL2xpZXMtZXhwb3NlZC9wYWNrYWdlcy9AbGlleHAvYmFja2VuZC92aXRlc3QuY29uZmlnLnRzXCI7aW1wb3J0IHZpdGVUc2NvbmZpZ1BhdGhzIGZyb20gXCJ2aXRlLXRzY29uZmlnLXBhdGhzXCI7XG5pbXBvcnQgeyBkZWZpbmVDb25maWcsIG1lcmdlQ29uZmlnIH0gZnJvbSBcInZpdGVzdC9jb25maWdcIjtcbmltcG9ydCB7IGJhc2VDb25maWcgfSBmcm9tIFwiLi9zcmMvdGVzdC92aXRlc3QuYmFzZS1jb25maWdcIjtcblxuZXhwb3J0IGRlZmF1bHQgbWVyZ2VDb25maWcoYmFzZUNvbmZpZywgZGVmaW5lQ29uZmlnKHtcbiAgdGVzdDoge1xuICAgIG5hbWU6IFwiQGxpZXhwL2JhY2tlbmRcIixcbiAgICByb290OiBfX2Rpcm5hbWUsXG4gICAgZ2xvYmFsczogdHJ1ZSxcbiAgICBpbmNsdWRlOiBbX19kaXJuYW1lICsgXCIvc3JjLyoqLyouc3BlYy50c1wiXSxcbiAgICB3YXRjaDogZmFsc2UsXG4gICAgY292ZXJhZ2U6IHtcbiAgICAgIHRocmVzaG9sZHM6IHtcbiAgICAgICAgbGluZXM6IDkwLFxuICAgICAgICBzdGF0ZW1lbnRzOiA4MCxcbiAgICAgICAgZnVuY3Rpb25zOiA4MCxcbiAgICAgIH0sXG4gICAgfSxcbiAgfSxcbiAgcGx1Z2luczogW3ZpdGVUc2NvbmZpZ1BhdGhzKHsgcm9vdDogX19kaXJuYW1lIH0pXSxcbiAgcm9vdDogX19kaXJuYW1lLFxufSkpO1xuIiwgImNvbnN0IF9fdml0ZV9pbmplY3RlZF9vcmlnaW5hbF9kaXJuYW1lID0gXCIvaG9tZS9hc2NhcmlhbmRyZWEvV29ya3NwYWNlL2xpZXMtZXhwb3NlZC9wYWNrYWdlcy9AbGlleHAvYmFja2VuZC9zcmMvdGVzdFwiO2NvbnN0IF9fdml0ZV9pbmplY3RlZF9vcmlnaW5hbF9maWxlbmFtZSA9IFwiL2hvbWUvYXNjYXJpYW5kcmVhL1dvcmtzcGFjZS9saWVzLWV4cG9zZWQvcGFja2FnZXMvQGxpZXhwL2JhY2tlbmQvc3JjL3Rlc3Qvdml0ZXN0LmJhc2UtY29uZmlnLnRzXCI7Y29uc3QgX192aXRlX2luamVjdGVkX29yaWdpbmFsX2ltcG9ydF9tZXRhX3VybCA9IFwiZmlsZTovLy9ob21lL2FzY2FyaWFuZHJlYS9Xb3Jrc3BhY2UvbGllcy1leHBvc2VkL3BhY2thZ2VzL0BsaWV4cC9iYWNrZW5kL3NyYy90ZXN0L3ZpdGVzdC5iYXNlLWNvbmZpZy50c1wiO2ltcG9ydCB7IFVSTCB9IGZyb20gXCJ1cmxcIjtcbmltcG9ydCB2aXRlVHNjb25maWdQYXRocyBmcm9tIFwidml0ZS10c2NvbmZpZy1wYXRoc1wiO1xuaW1wb3J0IHsgZGVmaW5lQ29uZmlnIH0gZnJvbSBcInZpdGVzdC9jb25maWdcIjtcblxuZXhwb3J0IGNvbnN0IFBhdGhuYW1lQWxpYXMgPVxuICAodXJsOiBzdHJpbmcpID0+XG4gIChtb2NrUGF0aDogc3RyaW5nKTogc3RyaW5nID0+IHtcbiAgICByZXR1cm4gbmV3IFVSTChtb2NrUGF0aCwgdXJsKS5wYXRobmFtZTtcbiAgfTtcblxuY29uc3QgdG9BbGlhcyA9IFBhdGhuYW1lQWxpYXMoaW1wb3J0Lm1ldGEudXJsKTtcblxuZXhwb3J0IGNvbnN0IGJhc2VDb25maWcgPSBkZWZpbmVDb25maWcoe1xuICByb290OiB0b0FsaWFzKFwiLi9cIiksXG4gIHRlc3Q6IHtcbiAgICBlbnZpcm9ubWVudDogXCJub2RlXCIsXG4gICAgd2F0Y2g6IGZhbHNlLFxuICAgIGFsaWFzOiB7XG4gICAgICBzaGFycDogdG9BbGlhcyhcIm1vY2tzL3NoYXJwLm1vY2suanNcIiksXG4gICAgICBjYW52YXM6IHRvQWxpYXMoXCJtb2Nrcy9jYW52YXMubW9jay5qc1wiKSxcbiAgICAgIFwicGRmanMtZGlzdC9sZWdhY3kvYnVpbGQvcGRmLmpzXCI6IHRvQWxpYXMoXCJtb2Nrcy9wZGZqcy5tb2NrLmpzXCIpLFxuICAgICAgXCJAYmxvY2tub3RlL2NvcmVcIjogdG9BbGlhcyhcIm1vY2tzL2Jsb2Nrbm90ZS1jb3JlLm1vY2suanNcIiksXG4gICAgICBcIkBibG9ja25vdGUvcmVhY3QvKipcIjogdG9BbGlhcyhcIm1vY2tzL2Jsb2Nrbm90ZS1yZWFjdC5tb2NrLmpzXCIpLFxuICAgIH0sXG4gIH0sXG4gIHBsdWdpbnM6IFtcbiAgICB2aXRlVHNjb25maWdQYXRocyh7XG4gICAgICByb290OiB0b0FsaWFzKFwiLi9cIiksXG4gICAgfSksXG4gIF0sXG59KTtcbiJdLAogICJtYXBwaW5ncyI6ICI7QUFBeVgsT0FBT0Esd0JBQXVCO0FBQ3ZaLFNBQVMsZ0JBQUFDLGVBQWMsbUJBQW1COzs7QUNEb1gsU0FBUyxXQUFXO0FBQ2xiLE9BQU8sdUJBQXVCO0FBQzlCLFNBQVMsb0JBQW9CO0FBRnNPLElBQU0sMkNBQTJDO0FBSTdTLElBQU0sZ0JBQ1gsQ0FBQyxRQUNELENBQUMsYUFBNkI7QUFDNUIsU0FBTyxJQUFJLElBQUksVUFBVSxHQUFHLEVBQUU7QUFDaEM7Q
UFFRixJQUFNLFVBQVUsY0FBYyx3Q0FBZTtBQUV0QyxJQUFNLGFBQWEsYUFBYTtBQUFBLEVBQ3JDLE1BQU0sUUFBUSxJQUFJO0FBQUEsRUFDbEIsTUFBTTtBQUFBLElBQ0osYUFBYTtBQUFBLElBQ2IsT0FBTztBQUFBLElBQ1AsT0FBTztBQUFBLE1BQ0wsT0FBTyxRQUFRLHFCQUFxQjtBQUFBLE1BQ3BDLFFBQVEsUUFBUSxzQkFBc0I7QUFBQSxNQUN0QyxrQ0FBa0MsUUFBUSxxQkFBcUI7QUFBQSxNQUMvRCxtQkFBbUIsUUFBUSw4QkFBOEI7QUFBQSxNQUN6RCx1QkFBdUIsUUFBUSwrQkFBK0I7QUFBQSxJQUNoRTtBQUFBLEVBQ0Y7QUFBQSxFQUNBLFNBQVM7QUFBQSxJQUNQLGtCQUFrQjtBQUFBLE1BQ2hCLE1BQU0sUUFBUSxJQUFJO0FBQUEsSUFDcEIsQ0FBQztBQUFBLEVBQ0g7QUFDRixDQUFDOzs7QUQ5QkQsSUFBTSxtQ0FBbUM7QUFJekMsSUFBTyx3QkFBUSxZQUFZLFlBQVlDLGNBQWE7QUFBQSxFQUNsRCxNQUFNO0FBQUEsSUFDSixNQUFNO0FBQUEsSUFDTixNQUFNO0FBQUEsSUFDTixTQUFTO0FBQUEsSUFDVCxTQUFTLENBQUMsbUNBQVksbUJBQW1CO0FBQUEsSUFDekMsT0FBTztBQUFBLElBQ1AsVUFBVTtBQUFBLE1BQ1IsWUFBWTtBQUFBLFFBQ1YsT0FBTztBQUFBLFFBQ1AsWUFBWTtBQUFBLFFBQ1osV0FBVztBQUFBLE1BQ2I7QUFBQSxJQUNGO0FBQUEsRUFDRjtBQUFBLEVBQ0EsU0FBUyxDQUFDQyxtQkFBa0IsRUFBRSxNQUFNLGlDQUFVLENBQUMsQ0FBQztBQUFBLEVBQ2hELE1BQU07QUFDUixDQUFDLENBQUM7IiwKICAibmFtZXMiOiBbInZpdGVUc2NvbmZpZ1BhdGhzIiwgImRlZmluZUNvbmZpZyIsICJkZWZpbmVDb25maWciLCAidml0ZVRzY29uZmlnUGF0aHMiXQp9Cg== diff --git a/packages/@liexp/shared/src/endpoints/admin.endpoints.ts b/packages/@liexp/shared/src/endpoints/admin.endpoints.ts index 79784dadff..e345028fc5 100644 --- a/packages/@liexp/shared/src/endpoints/admin.endpoints.ts +++ b/packages/@liexp/shared/src/endpoints/admin.endpoints.ts @@ -85,7 +85,7 @@ export const BuildImage = Endpoint({ ), }), }, - Output: t.any, + Output: Output(t.strict({ success: t.boolean }), "BuildImageOutput"), }); export const SearchAreaCoordinates = Endpoint({ diff --git a/packages/@liexp/shared/src/tests/arbitrary/Date.arbitrary.ts b/packages/@liexp/shared/src/tests/arbitrary/Date.arbitrary.ts index 2b715b6148..5c623e9479 100644 --- a/packages/@liexp/shared/src/tests/arbitrary/Date.arbitrary.ts +++ b/packages/@liexp/shared/src/tests/arbitrary/Date.arbitrary.ts @@ -1,4 +1,4 @@ -import { fc } from "@liexp/test"; +import { fc } from "@liexp/test/lib/index.js"; import { addYears, subYears } from "date-fns"; export const MIN_DATE = subYears(new Date(), 200); diff --git a/packages/@liexp/shared/src/tests/arbitrary/HumanReadableString.arbitrary.ts b/packages/@liexp/shared/src/tests/arbitrary/HumanReadableString.arbitrary.ts index 008923d6d6..dd04eaf6d4 100644 --- a/packages/@liexp/shared/src/tests/arbitrary/HumanReadableString.arbitrary.ts +++ b/packages/@liexp/shared/src/tests/arbitrary/HumanReadableString.arbitrary.ts @@ -1,4 +1,4 @@ -import { fc } from "@liexp/test"; +import { fc } from "@liexp/test/lib/index.js"; function capFirst(word: string): string { return word.charAt(0).toUpperCase() + word.slice(1); diff --git a/packages/@liexp/shared/src/tests/arbitrary/URL.arbitrary.ts b/packages/@liexp/shared/src/tests/arbitrary/URL.arbitrary.ts index e6042d21ae..df0cfe04fe 100644 --- a/packages/@liexp/shared/src/tests/arbitrary/URL.arbitrary.ts +++ b/packages/@liexp/shared/src/tests/arbitrary/URL.arbitrary.ts @@ -1,4 +1,4 @@ -import { fc } from "@liexp/test"; +import { fc } from "@liexp/test/lib/index.js"; import { HumanReadableStringArb } from "./HumanReadableString.arbitrary.js"; export const URLArb = fc diff --git a/packages/@liexp/shared/src/tests/arbitrary/common/UUID.arbitrary.ts b/packages/@liexp/shared/src/tests/arbitrary/common/UUID.arbitrary.ts index a58851fa27..ea610da035 100644 --- a/packages/@liexp/shared/src/tests/arbitrary/common/UUID.arbitrary.ts +++ b/packages/@liexp/shared/src/tests/arbitrary/common/UUID.arbitrary.ts @@ -1,4 +1,4 @@ -import { fc } from 
"@liexp/test"; +import { fc } from "@liexp/test/lib/index.js"; import { type UUID } from "../../../io/http/Common/UUID.js"; export const UUIDArb: fc.Arbitrary = fc.uuidV(4) as fc.Arbitrary; diff --git a/packages/@liexp/shared/src/tests/arbitrary/events/ScientificStudy.arbitrary.ts b/packages/@liexp/shared/src/tests/arbitrary/events/ScientificStudy.arbitrary.ts index 378e0e9d3e..1411e38a43 100644 --- a/packages/@liexp/shared/src/tests/arbitrary/events/ScientificStudy.arbitrary.ts +++ b/packages/@liexp/shared/src/tests/arbitrary/events/ScientificStudy.arbitrary.ts @@ -1,5 +1,5 @@ import { propsOmit } from "@liexp/core/lib/io/utils.js"; -import * as tests from "@liexp/test"; +import * as tests from "@liexp/test/lib/index.js"; import * as t from "io-ts"; import * as http from "../../../io/http/index.js"; import { MIN_DATE, MAX_DATE, DateArb } from "../Date.arbitrary.js"; diff --git a/services/ai-bot/src/run.ts b/services/ai-bot/src/run.ts index 00d655a4b5..50e32b0536 100644 --- a/services/ai-bot/src/run.ts +++ b/services/ai-bot/src/run.ts @@ -1,3 +1,4 @@ +import fs from "fs"; import path from "path"; import { type AvailableModels, @@ -136,7 +137,7 @@ void pipe( fp.TE.bind("env", () => pipe(loadAndParseENV(parseENV)(process.cwd()), fp.TE.fromEither), ), - fp.TE.bind("fs", () => fp.TE.right(GetFSClient())), + fp.TE.bind("fs", () => fp.TE.right(GetFSClient({ client: fs }))), fp.TE.bind("config", ({ fs }) => configProvider({ fs })), fp.TE.bind("langchain", ({ config }) => fp.TE.right( diff --git a/services/api/src/app/config.ts b/services/api/src/app/config.ts index 72fff66501..ad461fac69 100644 --- a/services/api/src/app/config.ts +++ b/services/api/src/app/config.ts @@ -1,4 +1,3 @@ -import { mkdirSync } from "fs"; import path from "path"; import { type BEConfig } from "@liexp/backend/lib/context/config.context.js"; import { EventsConfig } from "@liexp/backend/lib/queries/config/index.js"; @@ -24,13 +23,6 @@ export const Config = (env: ENV, cwd: string): AppConfig => { stats: path.resolve(tempRoot, "stats"), }; - // TODO: handle properly a possible error thrown by mkdirSync - [...Object.values(configFolders), ...Object.values(tempFolders)].forEach( - (folder) => { - mkdirSync(folder, { recursive: true }); - }, - ); - return { cors: { origin: env.NODE_ENV === "production" ? 
true : "*", diff --git a/services/api/src/context/context.type.ts b/services/api/src/context/context.type.ts index cf36781eab..80524c5e2c 100644 --- a/services/api/src/context/context.type.ts +++ b/services/api/src/context/context.type.ts @@ -4,7 +4,6 @@ import { type FFMPEGProviderContext, type GeocodeProviderContext, type NERProviderContext, - type URLMetadataContext, type WikipediaProviderContext, } from "@liexp/backend/lib/context/index.js"; import { type JWTProviderContext } from "@liexp/backend/lib/context/jwt.context.js"; @@ -13,6 +12,7 @@ import { type PuppeteerProviderContext } from "@liexp/backend/lib/context/puppet import { type QueuesProviderContext } from "@liexp/backend/lib/context/queue.context.js"; import { type RedisContext } from "@liexp/backend/lib/context/redis.context.js"; import { type SpaceContext } from "@liexp/backend/lib/context/space.context.js"; +import { type URLMetadataContext } from "@liexp/backend/lib/context/urlMetadata.context.js"; import { type WikipediaProvider } from "@liexp/backend/lib/providers/wikipedia/wikipedia.provider.js"; import { type ServerBlockNoteEditor } from "@liexp/shared/lib/providers/blocknote/ssr.js"; import { type HTTPProvider } from "@liexp/shared/lib/providers/http/http.provider.js"; diff --git a/services/api/src/context/index.ts b/services/api/src/context/index.ts index 477b418949..5f4521f2a2 100644 --- a/services/api/src/context/index.ts +++ b/services/api/src/context/index.ts @@ -1,3 +1,4 @@ +import * as fs from "fs"; import * as path from "path"; import { MakeURLMetadata } from "@liexp/backend/lib/providers/URLMetadata.provider.js"; import { GetFFMPEGProvider } from "@liexp/backend/lib/providers/ffmpeg/ffmpeg.provider.js"; @@ -8,7 +9,8 @@ import { GetNERProvider } from "@liexp/backend/lib/providers/ner/ner.provider.js import { GetTypeORMClient } from "@liexp/backend/lib/providers/orm/index.js"; import { GetPuppeteerProvider } from "@liexp/backend/lib/providers/puppeteer.provider.js"; import { GetQueueProvider } from "@liexp/backend/lib/providers/queue.provider.js"; -import { createS3Provider } from "@liexp/backend/lib/providers/space/creates3.provider.js"; +import { createS3ProviderConfig } from "@liexp/backend/lib/providers/space/creates3ProviderConfig.js"; +import { MakeSpaceProvider } from "@liexp/backend/lib/providers/space/space.provider.js"; import { WikipediaProvider } from "@liexp/backend/lib/providers/wikipedia/wikipedia.provider.js"; import { getDataSource, @@ -86,7 +88,7 @@ export const makeContext = }), }); - const fsClient = GetFSClient(); + const fsClient = GetFSClient({ client: fs }); const jwtClient = GetJWTProvider({ secret: env.JWT_SECRET, @@ -113,24 +115,12 @@ export const makeContext = sequenceS(fp.TE.ApplicativePar)({ logger: fp.TE.right(serverLogger), db, - s3: fp.TE.right(createS3Provider(env)), + s3: fp.TE.right(MakeSpaceProvider(createS3ProviderConfig(env))), fs: fp.TE.right(fsClient), jwt: fp.TE.right(jwtClient), urlMetadata: fp.TE.right(urlMetadataClient), env: fp.TE.right(env), blocknote: fp.TE.right(editor), - // tg: pipe( - // TGBotProvider( - // { logger: serverLogger }, - // { - // token: env.TG_BOT_TOKEN, - // chat: env.TG_BOT_CHAT, - // polling: env.TG_BOT_POLLING, - // baseApiUrl: env.TG_BOT_BASE_API_URL, - // }, - // ), - // fp.TE.right, - // ), puppeteer: fp.TE.right( GetPuppeteerProvider( puppeteer as any as VanillaPuppeteer, diff --git a/services/api/src/io/ControllerError.ts b/services/api/src/io/ControllerError.ts index 6bbdde6a95..aee176c91e 100644 --- 
a/services/api/src/io/ControllerError.ts +++ b/services/api/src/io/ControllerError.ts @@ -9,7 +9,7 @@ import { type FSError } from "@liexp/backend/lib/providers/fs/fs.provider.js"; import { JWTError } from "@liexp/backend/lib/providers/jwt/jwt.provider.js"; import { type NERError } from "@liexp/backend/lib/providers/ner/ner.provider.js"; import { DBError } from "@liexp/backend/lib/providers/orm/index.js"; -import { RedisError } from "@liexp/backend/lib/providers/redis/redis.provider.js"; +import { RedisError } from "@liexp/backend/lib/providers/redis/redis.error.js"; import { SpaceError } from "@liexp/backend/lib/providers/space/space.provider.js"; import { fp } from "@liexp/core/lib/fp/index.js"; import { diff --git a/services/api/src/routes/admin/images/buildImage.controller.ts b/services/api/src/routes/admin/images/buildImage.controller.ts index 70bd4b177a..efe1f0c69f 100644 --- a/services/api/src/routes/admin/images/buildImage.controller.ts +++ b/services/api/src/routes/admin/images/buildImage.controller.ts @@ -1,4 +1,4 @@ -import { buildImageWithSharp } from "@liexp/backend/lib/flows/media/admin/build-image/buildImageWithSharp.flow.js"; +import { BuildImageWithSharpPubSub } from "@liexp/backend/lib/pubsub/buildImageWithSharp.pubSub.js"; import { pipe } from "@liexp/core/lib/fp/index.js"; import { Endpoints } from "@liexp/shared/lib/endpoints/index.js"; import { type BuildImageLayer } from "@liexp/shared/lib/io/http/admin/BuildImage.js"; @@ -29,9 +29,9 @@ export const MakeAdminBuildImageRoute: Route = (r, ctx) => { layers.push(watermarkLayer); } return pipe( - buildImageWithSharp(layers)(ctx), - TE.map((buffer) => ({ - body: buffer.toString("base64"), + BuildImageWithSharpPubSub.publish({ image: null, layers })(ctx), + TE.map(() => ({ + body: { success: true }, statusCode: 201, })), ); diff --git a/services/api/src/routes/events/__tests__/searchEvents.e2e.ts b/services/api/src/routes/events/__tests__/searchEvents.e2e.ts index 4921b312d2..de4ee16b64 100644 --- a/services/api/src/routes/events/__tests__/searchEvents.e2e.ts +++ b/services/api/src/routes/events/__tests__/searchEvents.e2e.ts @@ -8,7 +8,7 @@ import { UncategorizedArb } from "@liexp/shared/lib/tests/arbitrary/Event.arbitr import { GroupArb } from "@liexp/shared/lib/tests/arbitrary/Group.arbitrary.js"; import { GroupMemberArb } from "@liexp/shared/lib/tests/index.js"; import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; -import { fc } from "@liexp/test"; +import { fc } from "@liexp/test/lib/index.js"; import * as A from "fp-ts/lib/Array.js"; import jwt from "jsonwebtoken"; import { GetAppTest, type AppTest } from "../../../../test/AppTest.js"; diff --git a/services/api/src/routes/events/scientific-study/__tests__/createScientificStudy.e2e.ts b/services/api/src/routes/events/scientific-study/__tests__/createScientificStudy.e2e.ts index 445396fa4d..23a656561f 100644 --- a/services/api/src/routes/events/scientific-study/__tests__/createScientificStudy.e2e.ts +++ b/services/api/src/routes/events/scientific-study/__tests__/createScientificStudy.e2e.ts @@ -17,7 +17,6 @@ import { GroupArb } from "@liexp/shared/lib/tests/arbitrary/Group.arbitrary.js"; import { HumanReadableStringArb } from "@liexp/shared/lib/tests/arbitrary/HumanReadableString.arbitrary.js"; import { LinkArb } from "@liexp/shared/lib/tests/index.js"; import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; -import { sanitizeURL } from "@liexp/shared/lib/utils/url.utils.js"; import { fc } from "@liexp/test"; import { In } from "typeorm"; import { 
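The buildImage controller change above replaces the inline buildImageWithSharp call with a publish on BuildImageWithSharpPubSub, so the actual image build now happens in the worker. A sketch of the same declare-then-publish pattern for a hypothetical channel; the ctx parameter is assumed to carry the redis context that publish requires.

import * as t from "io-ts";
import { UUID } from "io-ts-types";
import { RedisPubSub } from "@liexp/backend/lib/providers/redis/RedisPubSub.js";

// A hypothetical channel, declared the same way as the ones migrated in this patch.
export const ResizeImagePubSub = RedisPubSub(
  "image:resize",
  t.strict({ id: UUID, width: t.number }),
);

// Publishing only enqueues the payload; a worker-side Subscriber consumes it.
export const publishResize = (id: UUID, width: number) => (ctx: any) =>
  ResizeImagePubSub.publish({ id, width })(ctx);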
GetAppTest, type AppTest } from "../../../../../test/AppTest.js"; @@ -82,87 +81,6 @@ describe("Create Scientific Study", () => { await appTest.utils.e2eAfterAll(); }); - test("Should create a scientific study from url", async () => { - const [url] = fc - .sample(fc.nat(), 1) - .map((id) => `https://www.sciencedirect.com/article/${id}` as any); - - const title = fc.sample(HumanReadableStringArb(), 1)[0]; - const description = fc.sample(HumanReadableStringArb(), 1)[0]; - - const scientificStudyData = { url }; - - appTest.mocks.redis.publish.mockResolvedValue(1); - appTest.mocks.urlMetadata.fetchMetadata.mockResolvedValue({ - title, - description, - url: scientificStudyData.url, - keywords: [], - }); - - appTest.mocks.puppeteer.page.goto.mockResolvedValueOnce(undefined); - - // evaluate title - appTest.mocks.puppeteer.page.$eval.mockResolvedValueOnce(title); - // evaluate dropdown click - appTest.mocks.puppeteer.page.click.mockResolvedValueOnce(undefined); - // evaluate date string - appTest.mocks.puppeteer.page.$eval.mockResolvedValueOnce([ - "Received 27 July 2020", - "Accepted 1 August 2020", - ]); - // wait for - appTest.mocks.puppeteer.page.waitForSelector.mockResolvedValueOnce( - undefined, - ); - appTest.mocks.puppeteer.page.$$.mockResolvedValueOnce([ - { - evaluate: vi.fn().mockResolvedValue(description), - }, - ]); - - appTest.mocks.puppeteer.page.$eval.mockResolvedValueOnce("page content"); - - appTest.mocks.ner.winkMethods.learnCustomEntities.mockResolvedValueOnce( - {} as any, - ); - appTest.mocks.ner.doc.out.mockReturnValue([]); - appTest.mocks.ner.doc.sentences.mockReturnValue({ each: vi.fn() } as any); - appTest.mocks.ner.doc.customEntities.mockReturnValue({ - out: vi.fn().mockReturnValue([]), - } as any); - appTest.mocks.ner.doc.tokens.mockReturnValue({ each: vi.fn() } as any); - - const response = await appTest.req - .post(`/v1/scientific-studies`) - .set("Authorization", authorizationToken) - .send(scientificStudyData); - - const body = response.body.data; - expect(response.status).toEqual(201); - - expect(appTest.mocks.ner).toHaveBeenCalledTimes(1); - expect( - appTest.mocks.ner.winkMethods.learnCustomEntities, - ).toHaveBeenCalledTimes(1); - expect(appTest.mocks.ner.winkMethods.readDoc).toHaveBeenCalledTimes(1); - expect(appTest.mocks.ner.doc.out).toHaveBeenCalledTimes(1); - - const link = await pipe( - appTest.ctx.db.findOneOrFail(LinkEntity, { - where: { url: sanitizeURL(scientificStudyData.url) }, - }), - throwTE, - ); - - expect(body.type).toBe(SCIENTIFIC_STUDY.value); - expect(body.date).toBeDefined(); - expect(body.payload.url).toEqual(link.id); - expect(body.payload.title).toEqual(title); - - scientificStudyIds.push(body.id); - }); - test("Should create a scientific study from plain object", async () => { const [link] = await pipe( fc.sample(LinkArb, 1)[0], diff --git a/services/api/src/routes/media/__tests__/createMedia.e2e.ts b/services/api/src/routes/media/__tests__/createMedia.e2e.ts index 35841f9c58..5300f20675 100644 --- a/services/api/src/routes/media/__tests__/createMedia.e2e.ts +++ b/services/api/src/routes/media/__tests__/createMedia.e2e.ts @@ -42,7 +42,6 @@ describe("Create Media", () => { Test.mocks.puppeteer.page.goto.mockClear(); Test.mocks.puppeteer.page.waitForSelector.mockClear(); Test.mocks.puppeteer.page.$eval.mockClear(); - Test.mocks.sharp.mockClear(); Test.mocks.redis.publish.mockClear(); mockClear(sharpMock); }); diff --git a/services/api/src/run.ts b/services/api/src/run.ts index 657835babd..18a9d71a37 100644 --- a/services/api/src/run.ts +++ 
b/services/api/src/run.ts @@ -33,6 +33,14 @@ const run = (): Promise => { })), TE.mapLeft(ControllerError.report), TE.chain(({ ctx, app }) => { + // TODO: handle properly a possible error thrown by mkdirSync + [ + ...Object.values(ctx.config.dirs.config), + ...Object.values(ctx.config.dirs.temp), + ].forEach((folder) => { + ctx.fs._fs.mkdirSync(folder, { recursive: true }); + }); + const server = app.listen( ctx.env.SERVER_PORT, ctx.env.SERVER_HOST, diff --git a/services/api/test/AppTest.ts b/services/api/test/AppTest.ts index b44bc4b5d0..49d2a4be1f 100644 --- a/services/api/test/AppTest.ts +++ b/services/api/test/AppTest.ts @@ -17,7 +17,6 @@ import { ENV } from "#io/ENV.js"; import { GetFFMPEGProvider } from "@liexp/backend/lib/providers/ffmpeg/ffmpeg.provider.js"; import { GetFSClient } from "@liexp/backend/lib/providers/fs/fs.provider.js"; import { GeocodeProvider } from "@liexp/backend/lib/providers/geocode/geocode.provider.js"; -import { MakeImgProcClient } from "@liexp/backend/lib/providers/imgproc/imgproc.provider.js"; import { GetJWTProvider } from "@liexp/backend/lib/providers/jwt/jwt.provider.js"; import { GetNERProvider } from "@liexp/backend/lib/providers/ner/ner.provider.js"; import { GetTypeORMClient } from "@liexp/backend/lib/providers/orm/index.js"; @@ -35,7 +34,7 @@ import path from "path"; import supertest from "supertest"; import type TestAgent from "supertest/lib/agent.js"; import { vi } from "vitest"; -import { AppMocks, mocks } from "@liexp/backend/lib/test/mocks.js"; +import { DepsMocks, mocks } from "@liexp/backend/lib/test/mocks.js"; import { getDataSource, getORMConfig, @@ -56,7 +55,7 @@ vi.mock("node-telegram-bot-api"); export interface AppTest { ctx: ServerContext; - mocks: AppMocks; + mocks: DepsMocks; req: TestAgent; utils: { e2eAfterAll: () => Promise; @@ -93,10 +92,8 @@ export const loadAppContext = async ( { headless: "shell" }, mocks.puppeteer.devices, ), - tg: mocks.tg, s3: MakeSpaceProvider(mocks.s3 as any), - ig: mocks.ig, - fs: GetFSClient(), + fs: GetFSClient({ client: mocks.fs }), wp: mocks.wiki, rw: mocks.wiki, urlMetadata: { @@ -114,11 +111,6 @@ export const loadAppContext = async ( }, }, http: HTTPProvider(mocks.axios as any as AxiosInstance), - imgProc: MakeImgProcClient({ - logger, - exifR: mocks.exifR, - client: mocks.sharp as any, - }), ner: GetNERProvider({ logger, entitiesFile: path.resolve(config.dirs.config.nlp, "entities.json"), diff --git a/services/api/test/GetDockerContainer.ts b/services/api/test/GetDockerContainer.ts index 73c53f4dde..6ac19abd39 100644 --- a/services/api/test/GetDockerContainer.ts +++ b/services/api/test/GetDockerContainer.ts @@ -37,9 +37,8 @@ type GetDockerContainer = ( const listDatabases = async (): Promise => { const databases = await fs.readdir(DATABASES_FILE_PATH()); - // console.log("list databases", databases); - return databases; + return databases.filter((f) => DATABASE_RUN_COUNT_FILE_PATH() !== f); }; const readDatabases = async (): Promise> => { @@ -78,8 +77,9 @@ const getFirstFree = async (logger: Logger): Promise => { }; const waitForDatabase = async (logger: Logger): Promise => { - const [interval] = fc.sample(fc.integer({ min: 300, max: 600 }), 1); - return new Promise((resolve) => { + const [interval] = fc.sample(fc.integer({ min: 100, max: 200 }), 1); + return new Promise(async (resolve) => { + const databaseTimer = setInterval(async () => { const freeDatabase = await getFirstFree(logger); @@ -150,134 +150,133 @@ const safeUnlink = async (state?: STATE, db?: string) => { } }; -const 
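The TODO carried into run.ts above still lets mkdirSync throw during startup. One hedged way to type that failure, using fp-ts directly as the rest of the service does; ensureDir is illustrative and not part of the patch.

import * as IOE from "fp-ts/lib/IOEither.js";

// Lift directory creation into an IOEither so a failure becomes a typed
// error the startup pipeline can report, instead of an uncaught throw.
const ensureDir =
  (fsClient: { mkdirSync: (p: string, o: { recursive: boolean }) => unknown }) =>
  (folder: string): IOE.IOEither<Error, unknown> =>
    IOE.tryCatch(
      () => fsClient.mkdirSync(folder, { recursive: true }),
      (e) => new Error(`Cannot create folder ${folder}: ${String(e)}`),
    );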
GetDockerContainer: GetDockerContainer = - (logger) => (containerName) => { - const docker = new Docker(); +const GetDockerContainer: GetDockerContainer = (logger) => (containerName) => { + const docker = new Docker(); - const lookup = async () => { - const containers = await docker.listContainers({ all: true }); + const lookup = async () => { + const containers = await docker.listContainers({ all: true }); - logger.debug.log( - "Look for %s in %O", - containerName, - containers.map((c) => c.Names), - ); + logger.debug.log( + "Look for %s in %O", + containerName, + containers.map((c) => c.Names), + ); - const containerInfo = containers.find((c) => - // eslint-disable-next-line @typescript-eslint/restrict-template-expressions - c.Names.some((n) => n.indexOf(containerName) > 0), - ); + const containerInfo = containers.find((c) => + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions + c.Names.some((n) => n.indexOf(containerName) > 0), + ); - if (containerInfo) { - container = docker.getContainer(containerInfo.Id); + if (containerInfo) { + container = docker.getContainer(containerInfo.Id); - return container; - } + return container; + } - throw new Error(`Container ${containerName} not found`); - }; + throw new Error(`Container ${containerName} not found`); + }; - const addDatabases = async (dbCount: number): Promise => { - const currentDatabases = await listDatabases(); + const addDatabases = async (dbCount: number): Promise => { + const currentDatabases = await listDatabases(); - logger.debug.log( - `${currentDatabases.length} databases found, expected ${dbCount}`, - ); + logger.debug.log( + `${currentDatabases.length} databases found, expected ${dbCount}`, + ); - await fs.writeFile(DATABASE_RUN_COUNT_FILE_PATH(), "0"); - - if (currentDatabases.length >= dbCount) { - const databases = await readDatabases(); - await Promise.all( - Object.entries(databases).flatMap(async ([db, isUse]) => { - const promises = []; - if (isUse) { - promises.push(safeUnlink(IN_USE_STATE, db)); - } - promises.push( - fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), ""), - ); - - return promises; - }), - ); - - return currentDatabases; - } + await fs.writeFile(DATABASE_RUN_COUNT_FILE_PATH(), "0"); - container = await lookup(); + if (currentDatabases.length >= dbCount) { + const databases = await readDatabases(); + await Promise.all( + Object.entries(databases).flatMap(async ([db, isUse]) => { + const promises = []; + if (isUse) { + promises.push(safeUnlink(IN_USE_STATE, db)); + } + promises.push(fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), "")); + + return promises; + }), + ); - const databases = fc - .sample(HumanReadableStringArb(), dbCount) - .map((s) => s.toLowerCase().replace(/-/g, "_")); + return currentDatabases; + } - logger.debug.log("adding databases: %s", databases); - await execCommand(container, { - Cmd: [ - "/bin/bash", - "-h", - "/docker-entrypoint-initdb.d/psql-create-database.sh", - ], - Env: [`DBS=${databases.join(",")}`], - AttachStdout: true, - AttachStderr: true, - }); + container = await lookup(); + + const databases = fc + .sample(HumanReadableStringArb(), dbCount) + .map((s) => s.toLowerCase().replace(/-/g, "_")); + + logger.debug.log("adding databases: %s", databases); + await execCommand(container, { + Cmd: [ + "/bin/bash", + "-h", + "/docker-entrypoint-initdb.d/psql-create-database.sh", + ], + Env: [`DBS=${databases.join(",")}`], + AttachStdout: true, + AttachStderr: true, + }); - await Promise.all( - databases.map((db) => - 
fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), ""), - ), - ); + await Promise.all( + databases.map((db) => + fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), ""), + ), + ); - return databases; - }; + return databases; + }; - const markDatabaseAsUsed = async (database: string) => { - await safeUnlink(IN_USE_STATE, database); - await fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, database), ""); - }; + const markDatabaseAsUsed = async (database: string) => { + await safeUnlink(IN_USE_STATE, database); + await fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, database), ""); + }; - const freeDatabases = async () => { - const databases = await readDatabases(); + const freeDatabases = async () => { + const databases = await readDatabases(); - logger.info.log( - "Resetting databases %d to free %O", - Object.keys(databases).length, - ); + logger.info.log( + "Resetting databases %d to free %O", + Object.keys(databases).length, + ); - await Promise.all( - Object.entries(databases).flatMap(([db, state]) => [ - safeUnlink(state ? IN_USE_STATE : FREE_STATE, db), - fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), ""), - ]), - ); - }; - - const assertLocalCacheFolder = async () => { - try { - await fs.readdir(DATABASES_FILE_PATH(), { recursive: true }); - } catch (e: any) { - if (e.code !== "EEXIST") { - throw e; - } + await Promise.all( + Object.entries(databases).flatMap(([db, state]) => [ + safeUnlink(state ? IN_USE_STATE : FREE_STATE, db), + fs.writeFile(DATABASES_FILE_PATH(FREE_STATE, db), ""), + ]), + ); + }; + + const assertLocalCacheFolder = async () => { + try { + await fs.readdir(DATABASES_FILE_PATH(), { recursive: true }); + } catch (e: any) { + if (e.code !== "EEXIST") { + throw e; } - }; - - return { - lookup, - addDatabases, - waitForDatabase: () => waitForDatabase(logger), - markDatabaseAsUsed, - freeDatabases, - assertLocalCacheFolder, - getRunStats: async () => { - return { - used: await fs.readFile(DATABASE_RUN_COUNT_FILE_PATH(), "utf-8").then((s) => parseInt(s)), - }; - }, - }; + } }; + return { + lookup, + addDatabases, + waitForDatabase: () => waitForDatabase(logger), + markDatabaseAsUsed, + freeDatabases, + assertLocalCacheFolder, + getRunStats: async () => { + return { + used: await fs + .readFile(DATABASE_RUN_COUNT_FILE_PATH(), "utf-8") + .then((s) => parseInt(s)), + }; + }, + }; +}; + export const testDBContainer = GetDockerContainer(GetLogger("dkr"))( "db-test.liexp.dev", ); diff --git a/services/api/test/testSetup.ts b/services/api/test/testSetup.ts index a823d37268..c1d1255093 100644 --- a/services/api/test/testSetup.ts +++ b/services/api/test/testSetup.ts @@ -2,13 +2,8 @@ import { GetLogger } from "@liexp/core/lib/logger/Logger.js"; import { afterAll, beforeAll } from "vitest"; import { type AppTest, initAppTest, loadAppContext } from "./AppTest.js"; import { testDBContainer } from "./GetDockerContainer.js"; -import { upsertNLPEntities } from "@liexp/backend/lib/flows/admin/nlp/upsertEntities.flow.js"; -import { pipe } from "@liexp/core/lib/fp/index.js"; -import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; import D from "debug"; -import { getOlderThanOr } from "@liexp/backend/lib/flows/fs/getOlderThanOr.flow.js"; import { ServerContext } from '../src/context/context.type.js' -import path from 'path'; const logger = GetLogger("testSetup"); @@ -34,16 +29,6 @@ beforeAll(async () => { g.appContext = await loadAppContext(logger); } - const configFile = path.resolve(g.appContext.config.dirs.config.nlp, "entities.json") - - await pipe( - getOlderThanOr( - configFile, - 
10, - )(upsertNLPEntities)(g.appContext), - throwTE, - ); - logger.debug.log("app context", !!g.appContext); g.appTest = await initAppTest(g.appContext, process.env.DB_DATABASE!); diff --git a/services/api/test/vitest.config.e2e.ts b/services/api/test/vitest.config.e2e.ts index e3b85bddfb..7a066476a7 100644 --- a/services/api/test/vitest.config.e2e.ts +++ b/services/api/test/vitest.config.e2e.ts @@ -1,5 +1,8 @@ import { defineProject, mergeConfig } from "vitest/config"; -import { PathnameAlias, baseConfig } from "@liexp/backend/lib/test/vitest.base-config.js"; +import { + PathnameAlias, + baseConfig, +} from "@liexp/backend/lib/test/vitest.base-config.js"; const toAlias = PathnameAlias(import.meta.url); @@ -14,10 +17,10 @@ const config = mergeConfig( setupFiles: [toAlias(`testSetup.ts`)], globalSetup: [toAlias(`globalSetup.ts`)], exclude: ["**/build", "**/src/migrations", "**/src/scripts"], - pool: "vmThreads", + pool: "forks", poolOptions: { - threads: { - singleThread: process.env.CI === "true" ? true : false, + forks: { + singleFork: process.env.CI === "true" ? true : false, isolate: false, }, }, @@ -25,4 +28,4 @@ const config = mergeConfig( }), ); -export default config; \ No newline at end of file +export default config; diff --git a/services/worker/src/bin/start-ctx.ts b/services/worker/src/bin/start-ctx.ts index 9bc5717fff..008c129ce6 100644 --- a/services/worker/src/bin/start-ctx.ts +++ b/services/worker/src/bin/start-ctx.ts @@ -4,6 +4,7 @@ import { ENVParser } from "@liexp/shared/lib/utils/env.utils.js"; import { throwTE } from "@liexp/shared/lib/utils/task.utils.js"; import D from "debug"; import { type WorkerContext } from "../context/context.js"; +import { loadImplementation } from "../context/load.js"; import { makeContext } from "../context/make.js"; import { ENV } from "../io/env.js"; @@ -15,10 +16,12 @@ export const startContext = async (env?: any): Promise => { D.enable(process.env.DEBUG ?? 
"@liexp:*:error"); + process.env.TG_BOT_POLLING = "false"; + return pipe( ENVParser(ENV.decode)({ ...process.env, TG_BOT_POLLING: "false", ...env }), fp.TE.fromEither, - fp.TE.chain(makeContext), + fp.TE.chain((env) => makeContext(env, loadImplementation(env))), throwTE, ); }; diff --git a/services/worker/src/context/context.ts b/services/worker/src/context/context.ts index d45f580f25..f171a02207 100644 --- a/services/worker/src/context/context.ts +++ b/services/worker/src/context/context.ts @@ -9,7 +9,6 @@ import { type ImgProcClientContext, type TGBotProviderContext, type WikipediaProviderContext, - type URLMetadataContext, type NERProviderContext, } from "@liexp/backend/lib/context/index.js"; import { type LoggerContext } from "@liexp/backend/lib/context/logger.context.js"; @@ -18,6 +17,7 @@ import { type PuppeteerProviderContext } from "@liexp/backend/lib/context/puppet import { type QueuesProviderContext } from "@liexp/backend/lib/context/queue.context.js"; import { type RedisContext } from "@liexp/backend/lib/context/redis.context.js"; import { type SpaceContext } from "@liexp/backend/lib/context/space.context.js"; +import { type URLMetadataContext } from "@liexp/backend/lib/context/urlMetadata.context.js"; import { type WikipediaProvider } from "@liexp/backend/lib/providers/wikipedia/wikipedia.provider.js"; import { type WorkerConfig } from "../config.js"; import { type ENV } from "#io/env.js"; diff --git a/services/worker/src/context/load.ts b/services/worker/src/context/load.ts index 407edcdad7..ccdb82fb9f 100644 --- a/services/worker/src/context/load.ts +++ b/services/worker/src/context/load.ts @@ -1,16 +1,99 @@ +import fs from "fs"; +import { createS3ProviderConfig } from "@liexp/backend/lib/providers/space/creates3ProviderConfig.js"; +import { + getDataSource, + getORMConfig, +} from "@liexp/backend/lib/utils/data-source.js"; import { loadAndParseENV } from "@liexp/core/lib/env/utils.js"; import { ENVParser } from "@liexp/shared/lib/utils/env.utils.js"; +import axios from "axios"; +import Ffmpeg from "fluent-ffmpeg"; import * as TE from "fp-ts/lib/TaskEither.js"; import { pipe } from "fp-ts/lib/function.js"; +import Redis from "ioredis"; +import MW from "nodemw"; +import metadataParser from "page-metadata-parser"; +import * as pdfJs from "pdfjs-dist/legacy/build/pdf.mjs"; +import * as pup from "puppeteer-core"; +import { addExtra, type VanillaPuppeteer } from "puppeteer-extra"; +import puppeteerStealth from "puppeteer-extra-plugin-stealth"; +import sharp from "sharp"; +import WinkFn from "wink-nlp"; import { type WorkerContext } from "./context.js"; -import { makeContext } from "./make.js"; +import { type ContextImplementation, makeContext } from "./make.js"; import { ENV } from "#io/env.js"; import { type WorkerError } from "#io/worker.error.js"; +export const loadImplementation = (env: ENV): ContextImplementation => { + const p = addExtra(pup as any); + p.use(puppeteerStealth()); + return { + redis: { + client: Redis.Redis as any, + }, + pdf: { + client: pdfJs, + }, + imgProc: { + client: () => Promise.resolve(sharp), + }, + ner: { + nlp: { + client: WinkFn, + }, + }, + puppeteer: { + client: p as any as VanillaPuppeteer, + }, + ffmpeg: { + client: Ffmpeg, + }, + urlMetadata: { + client: axios.create(), + parser: { + getMetadata: metadataParser.getMetadata, + }, + }, + wp: { + wiki: new MW({ + protocol: "https", + server: "en.wikipedia.org", + path: "/w", + debug: true, + concurrency: 5, + }), + http: axios.create({ + baseURL: "https://en.wikipedia.org/api/rest_v1", + }), + 
}, + rw: { + wiki: new MW({ + protocol: "https", + server: "rationalwiki.org", + path: "/w", + debug: true, + concurrency: 5, + }), + http: axios.create({ + baseURL: "https://rationalwiki.org/api/rest_v1", + }), + }, + http: { + client: axios.create(), + }, + geo: { + client: axios.create({ baseURL: env.GEO_CODE_BASE_URL }), + }, + space: createS3ProviderConfig(env), + db: { client: getDataSource(getORMConfig(env, false)) }, + fs: { client: fs }, + }; +}; + export const loadContext = (): TE.TaskEither => { return pipe( loadAndParseENV(ENVParser(ENV.decode))(process.cwd()), TE.fromEither, - TE.chain(makeContext), + TE.chain((env) => makeContext(env, loadImplementation(env))), ); }; diff --git a/services/worker/src/context/make.ts b/services/worker/src/context/make.ts index 812821ff99..7824a71df8 100644 --- a/services/worker/src/context/make.ts +++ b/services/worker/src/context/make.ts @@ -1,7 +1,13 @@ import path from "path"; -import { MakeURLMetadata } from "@liexp/backend/lib/providers/URLMetadata.provider.js"; +import { + MakeURLMetadata, + type MakeURLMetadataContext, +} from "@liexp/backend/lib/providers/URLMetadata.provider.js"; import { GetFFMPEGProvider } from "@liexp/backend/lib/providers/ffmpeg/ffmpeg.provider.js"; -import { GetFSClient } from "@liexp/backend/lib/providers/fs/fs.provider.js"; +import { + GetFSClient, + type GetFSClientContext, +} from "@liexp/backend/lib/providers/fs/fs.provider.js"; import { GeocodeProvider } from "@liexp/backend/lib/providers/geocode/geocode.provider.js"; import { IGProvider } from "@liexp/backend/lib/providers/ig/ig.provider.js"; import { MakeImgProcClient } from "@liexp/backend/lib/providers/imgproc/imgproc.provider.js"; @@ -9,107 +15,104 @@ import { GetNERProvider } from "@liexp/backend/lib/providers/ner/ner.provider.js import { GetTypeORMClient } from "@liexp/backend/lib/providers/orm/database.provider.js"; import { GetPuppeteerProvider } from "@liexp/backend/lib/providers/puppeteer.provider.js"; import { GetQueueProvider } from "@liexp/backend/lib/providers/queue.provider.js"; -import { createS3Provider } from "@liexp/backend/lib/providers/space/creates3.provider.js"; +import { RedisClient } from "@liexp/backend/lib/providers/redis/redis.provider.js"; +import { + MakeSpaceProvider, + type MakeSpaceProviderConfig, +} from "@liexp/backend/lib/providers/space/space.provider.js"; import { TGBotProvider } from "@liexp/backend/lib/providers/tg/tg.provider.js"; import { WikipediaProvider } from "@liexp/backend/lib/providers/wikipedia/wikipedia.provider.js"; -import { - getDataSource, - getORMConfig, -} from "@liexp/backend/lib/utils/data-source.js"; import { fp, pipe } from "@liexp/core/lib/fp/index.js"; import { GetLogger } from "@liexp/core/lib/logger/index.js"; import { editor } from "@liexp/shared/lib/providers/blocknote/ssr.js"; import { HTTPProvider } from "@liexp/shared/lib/providers/http/http.provider.js"; import { PDFProvider } from "@liexp/shared/lib/providers/pdf/pdf.provider.js"; -import * as axios from "axios"; +import type * as axios from "axios"; import ExifReader from "exifreader"; -import ffmpeg from "fluent-ffmpeg"; +import type ffmpeg from "fluent-ffmpeg"; import { sequenceS } from "fp-ts/lib/Apply.js"; import { type TaskEither } from "fp-ts/lib/TaskEither.js"; -import { Redis } from "ioredis"; -import MW from "nodemw"; -import metadataParser from "page-metadata-parser"; +import type { Redis } from "ioredis"; +import TelegramBot from "node-telegram-bot-api"; +import type MW from "nodemw"; +import type * as pdfJS from 
"pdfjs-dist/legacy/build/pdf.mjs"; import * as puppeteer from "puppeteer-core"; import { type VanillaPuppeteer } from "puppeteer-extra"; import type sharpT from "sharp"; -import WinkFn from "wink-nlp"; +import { type DataSource } from "typeorm"; +import type winkNLP from "wink-nlp"; import { Config } from "../config.js"; import { type WorkerContext } from "./context.js"; import { type ENV } from "#io/env.js"; import { toWorkerError, type WorkerError } from "#io/worker.error.js"; +export interface ContextImplementation { + redis: { client: Redis }; + wp: { wiki: MW; http: axios.AxiosInstance }; + rw: { wiki: MW; http: axios.AxiosInstance }; + urlMetadata: MakeURLMetadataContext; + pdf: { client: typeof pdfJS }; + http: { client: axios.AxiosInstance }; + geo: { client: axios.AxiosInstance }; + imgProc: { client: () => Promise }; + ner: { nlp: { client: typeof winkNLP } }; + puppeteer: { client: VanillaPuppeteer }; + ffmpeg: { client: typeof ffmpeg }; + db: { client: TaskEither }; + space: MakeSpaceProviderConfig; + fs: GetFSClientContext; +} + export const makeContext = ( env: ENV, + impl: ContextImplementation, ): TaskEither => { const serverLogger = GetLogger("worker"); const config = Config(process.cwd()); const db = pipe( - getDataSource(getORMConfig(env, false)), + impl.db.client, fp.TE.chain(GetTypeORMClient), fp.TE.mapLeft(toWorkerError), ); - const fsClient = GetFSClient(); + const fsClient = GetFSClient(impl.fs); const wpProvider = WikipediaProvider({ logger: GetLogger("wp"), - client: new MW({ - protocol: "https", - server: "en.wikipedia.org", - path: "/w", - debug: true, - concurrency: 5, - }), - restClient: axios.default.create({ - baseURL: "https://en.wikipedia.org/api/rest_v1", - }), + client: impl.wp.wiki, + restClient: impl.wp.http, }); const rationalWikiProvider = WikipediaProvider({ logger: GetLogger("rw"), - client: new MW({ - protocol: "https", - server: "rationalwiki.org", - path: "/w", - debug: true, - concurrency: 5, - }), - restClient: axios.default.create({ - baseURL: "https://rationalwiki.org/api/rest_v1", - }), + client: impl.rw.wiki, + restClient: impl.rw.http, }); - const urlMetadataClient = MakeURLMetadata({ - client: axios.default.create({}), - parser: { - getMetadata: metadataParser.getMetadata, - }, - }); - - const redisClient = fp.TE.tryCatch(async () => { - const redis = new Redis(6379, env.REDIS_HOST, { - lazyConnect: true, - }); + const urlMetadataClient = MakeURLMetadata(impl.urlMetadata); - if (env.REDIS_CONNECT) { - await redis.connect(); - } - return redis; - }, toWorkerError); + const redisClient = RedisClient({ + port: 6379, + host: env.REDIS_HOST, + client: impl.redis.client, + }); return pipe( sequenceS(fp.TE.ApplicativePar)({ + env: fp.TE.right(env), logger: fp.TE.right(serverLogger), db, - s3: fp.TE.right(createS3Provider(env)), + s3: fp.TE.right(MakeSpaceProvider(impl.space)), fs: fp.TE.right(fsClient), - env: fp.TE.right(env), redis: redisClient, tg: pipe( TGBotProvider( - { logger: serverLogger }, + { + logger: serverLogger, + client: (token, opts) => new TelegramBot(token, opts), + }, { token: env.TG_BOT_TOKEN, chat: env.TG_BOT_CHAT, @@ -130,65 +133,56 @@ export const makeContext = ( ), urlMetadata: fp.TE.right(urlMetadataClient), puppeteer: fp.TE.right( - GetPuppeteerProvider( - puppeteer as any as VanillaPuppeteer, - {}, - puppeteer.KnownDevices, - ), + GetPuppeteerProvider(impl.puppeteer.client, {}, puppeteer.KnownDevices), ), - ffmpeg: fp.TE.right(GetFFMPEGProvider(ffmpeg)), + ffmpeg: fp.TE.right(GetFFMPEGProvider(impl.ffmpeg.client)), 
       imgProc: pipe(
-        fp.TE.tryCatch(
-          (): Promise =>
-            import("sharp").then((imp) => imp.default),
-          toWorkerError,
-        ),
+        fp.TE.tryCatch(() => impl.imgProc.client(), toWorkerError),
         fp.TE.map((sharp) =>
           MakeImgProcClient({
             logger: serverLogger.extend("imgproc"),
-            client: sharp.bind(sharp),
+            client: sharp,
             exifR: ExifReader,
           }),
         ),
       ),
-      http: fp.TE.right(HTTPProvider(axios.default.create({}))),
+      http: fp.TE.right(HTTPProvider(impl.http.client)),
       config: config({ fs: fsClient, logger: serverLogger }),
-      geo: fp.TE.right(
+      blocknote: fp.TE.right(editor),
+      wp: fp.TE.right(wpProvider),
+      rw: fp.TE.right(rationalWikiProvider),
+    }),
+    fp.TE.bind("geo", (ctx) =>
+      fp.TE.right(
         GeocodeProvider({
-          http: HTTPProvider(
-            axios.default.create({ baseURL: env.GEO_CODE_BASE_URL }),
-          ),
-          apiKey: env.GEO_CODE_API_KEY,
+          http: HTTPProvider(impl.geo.client),
+          apiKey: ctx.env.GEO_CODE_API_KEY,
         }),
       ),
-      blocknote: fp.TE.right(editor),
-      ner: fp.TE.right(
+    ),
+    fp.TE.bind("ner", (ctx) =>
+      fp.TE.right(
         GetNERProvider({
           logger: serverLogger.extend("ner"),
-          entitiesFile: path.resolve(process.cwd(), "config/nlp/entities.json"),
-          nlp: WinkFn,
+          entitiesFile: path.resolve(
+            ctx.config.dirs.config.nlp,
+            "entities.json",
+          ),
+          nlp: impl.ner.nlp.client,
         }),
       ),
-      wp: fp.TE.right(wpProvider),
-      rw: fp.TE.right(rationalWikiProvider),
-    }),
+    ),
     fp.TE.bind("queue", (ctx) =>
       fp.TE.right(GetQueueProvider(fsClient, ctx.config.dirs.temp.queue)),
     ),
     fp.TE.bind("pdf", ({ config }) =>
-      pipe(
-        fp.TE.tryCatch(
-          () => import("pdfjs-dist/legacy/build/pdf.mjs"),
-          toWorkerError,
-        ),
-        fp.TE.map((pdf) =>
-          PDFProvider({
-            client: pdf,
-            cMapUrl: config.dirs.pdf.cMapUrl,
-            cMapPacked: true,
-            standardFontDataUrl: config.dirs.pdf.standardFontDataUrl,
-          }),
-        ),
+      fp.TE.right(
+        PDFProvider({
+          client: impl.pdf.client,
+          cMapUrl: config.dirs.pdf.cMapUrl,
+          cMapPacked: true,
+          standardFontDataUrl: config.dirs.pdf.standardFontDataUrl,
+        }),
       ),
     ),
   );
diff --git a/services/worker/src/io/worker.error.ts b/services/worker/src/io/worker.error.ts
index 1ea2c218d3..e5ea9263ff 100644
--- a/services/worker/src/io/worker.error.ts
+++ b/services/worker/src/io/worker.error.ts
@@ -1,7 +1,7 @@
 import { type NotFoundError } from "@liexp/backend/lib/errors/NotFoundError.js";
 import { type IOError } from "@liexp/backend/lib/errors/index.js";
 import { type DBError } from "@liexp/backend/lib/providers/orm/database.provider.js";
-import { type RedisError } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { type RedisError } from "@liexp/backend/lib/providers/redis/redis.error.js";
 import { type SpaceError } from "@liexp/backend/lib/providers/space/space.provider.js";
 import { type _DecodeError } from "@liexp/shared/lib/io/http/Error/DecodeError.js";
 import { type HTTPError } from "@liexp/shared/lib/providers/http/http.provider.js";
diff --git a/services/worker/src/services/subscribers/WorkerSubscribers.ts b/services/worker/src/services/subscribers/WorkerSubscribers.ts
index 7fbbc220fc..e8ac90a006 100644
--- a/services/worker/src/services/subscribers/WorkerSubscribers.ts
+++ b/services/worker/src/services/subscribers/WorkerSubscribers.ts
@@ -1,4 +1,4 @@
-import { type Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { type Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js";
 import { fp, pipe } from "@liexp/core/lib/fp/index.js";
 import { type RTE } from "../../types.js";
 import { CreateEventFromURLSubscriber } from "./event/createEventFromURL.subscriber.js";
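Note: the make.ts hunks above all apply a single idea: makeContext no longer constructs concrete clients (ioredis, nodemw, axios, sharp, pdfjs-dist) itself, but receives them through the ContextImplementation object that loadImplementation assembles in load.ts, so tests can inject stubs without touching the wiring. What follows is a simplified, self-contained TypeScript sketch of that pattern; every name and type in it is illustrative only, not the repository's actual interface.

// Illustrative sketch of the "implementation object" pattern; hypothetical names.
import * as E from "fp-ts/lib/Either.js";
import * as TE from "fp-ts/lib/TaskEither.js";
import { type TaskEither } from "fp-ts/lib/TaskEither.js";

// Everything the factory needs, passed in as plain data instead of being built inline.
interface Implementation {
  http: { client: { get: (url: string) => Promise<string> } };
  clock: { now: () => Date };
}

interface Context {
  fetchPage: (url: string) => TaskEither<Error, string>;
  today: () => string;
}

// Wiring only: no axios.create(), no new Redis() in here.
export const makeContext = (impl: Implementation): Context => ({
  fetchPage: (url) => TE.tryCatch(() => impl.http.client.get(url), E.toError),
  today: () => impl.clock.now().toISOString(),
});

// The production entry point builds the real clients once...
export const loadImplementation = (): Implementation => ({
  http: { client: { get: async (url) => (await fetch(url)).text() } },
  clock: { now: () => new Date() },
});

// ...while a test hands in stubs without touching the factory.
export const testContext = makeContext({
  http: { client: { get: async () => "<html></html>" } },
  clock: { now: () => new Date(0) },
});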
diff --git a/services/worker/src/services/subscribers/event/createEventFromURL.subscriber.ts b/services/worker/src/services/subscribers/event/createEventFromURL.subscriber.ts
index 4c417246e1..d226e3ea53 100644
--- a/services/worker/src/services/subscribers/event/createEventFromURL.subscriber.ts
+++ b/services/worker/src/services/subscribers/event/createEventFromURL.subscriber.ts
@@ -1,5 +1,5 @@
 import { createEventFromURL } from "@liexp/backend/lib/flows/event/createEventFromURL.flow.js";
-import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js";
 import { CreateEventFromURLPubSub } from "@liexp/backend/lib/pubsub/events/createEventFromURL.pubSub.js";
 import { UserRepository } from "@liexp/backend/lib/services/entity-repository.service.js";
 import { fp } from "@liexp/core/lib/fp/index.js";
diff --git a/services/worker/src/services/subscribers/media/extractMediaExtra.subscriber.ts b/services/worker/src/services/subscribers/media/extractMediaExtra.subscriber.ts
index 364d064701..7a57a9f244 100644
--- a/services/worker/src/services/subscribers/media/extractMediaExtra.subscriber.ts
+++ b/services/worker/src/services/subscribers/media/extractMediaExtra.subscriber.ts
@@ -1,6 +1,6 @@
 import { type MediaEntity } from "@liexp/backend/lib/entities/Media.entity.js";
 import { extractMediaExtra } from "@liexp/backend/lib/flows/media/extra/extractMediaExtra.flow.js";
-import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js";
 import { ExtractMediaExtraPubSub } from "@liexp/backend/lib/pubsub/media/extractMediaExtra.pubSub.js";
 import { MediaRepository } from "@liexp/backend/lib/services/entity-repository.service.js";
 import { fp } from "@liexp/core/lib/fp/index.js";
diff --git a/services/worker/src/services/subscribers/media/generateThumbnail.subscriber.ts b/services/worker/src/services/subscribers/media/generateThumbnail.subscriber.ts
index 726362aff5..4897b0eb80 100644
--- a/services/worker/src/services/subscribers/media/generateThumbnail.subscriber.ts
+++ b/services/worker/src/services/subscribers/media/generateThumbnail.subscriber.ts
@@ -1,5 +1,5 @@
 import { generateThumbnailFlow } from "@liexp/backend/lib/flows/media/thumbnails/generateThumbnails.flow.js";
-import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js";
 import { GenerateThumbnailPubSub } from "@liexp/backend/lib/pubsub/media/generateThumbnail.pubSub.js";
 import { fp } from "@liexp/core/lib/fp/index.js";
 import { pipe } from "fp-ts/lib/function.js";
diff --git a/services/worker/src/services/subscribers/media/transferFromExternalProvider.subscriber.ts b/services/worker/src/services/subscribers/media/transferFromExternalProvider.subscriber.ts
index 8f14ce38ec..a1b822e3a6 100644
--- a/services/worker/src/services/subscribers/media/transferFromExternalProvider.subscriber.ts
+++ b/services/worker/src/services/subscribers/media/transferFromExternalProvider.subscriber.ts
@@ -1,4 +1,4 @@
-import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js";
+import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js";
 import { TransferMediaFromExternalProviderPubSub } from "@liexp/backend/lib/pubsub/media/transferFromExternalProvider.pubSub.js";
 import { MediaRepository } from "@liexp/backend/lib/services/entity-repository.service.js";
"@liexp/backend/lib/services/entity-repository.service.js"; import { fp, pipe } from "@liexp/core/lib/fp/index.js"; diff --git a/services/worker/src/services/subscribers/nlp/extractEntitiesWithNLP.subscriber.ts b/services/worker/src/services/subscribers/nlp/extractEntitiesWithNLP.subscriber.ts index a5aae895bc..5637cc80ea 100644 --- a/services/worker/src/services/subscribers/nlp/extractEntitiesWithNLP.subscriber.ts +++ b/services/worker/src/services/subscribers/nlp/extractEntitiesWithNLP.subscriber.ts @@ -1,5 +1,5 @@ import { extractEntitiesFromAnyCached } from "@liexp/backend/lib/flows/admin/nlp/extractEntitiesFromAny.flow.js"; -import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js"; +import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js"; import { ExtractEntitiesWithNLP } from "@liexp/backend/lib/pubsub/nlp/extractEntitiesWithNLP.pubSub.js"; import { fp } from "@liexp/core/lib/fp/index.js"; import { pipe } from "fp-ts/lib/function.js"; diff --git a/services/worker/src/services/subscribers/social-post/PostToSocialPlatforms.subscriber.ts b/services/worker/src/services/subscribers/social-post/PostToSocialPlatforms.subscriber.ts index debef1565d..6bd6a202df 100644 --- a/services/worker/src/services/subscribers/social-post/PostToSocialPlatforms.subscriber.ts +++ b/services/worker/src/services/subscribers/social-post/PostToSocialPlatforms.subscriber.ts @@ -1,12 +1,13 @@ -import { Subscriber } from "@liexp/backend/lib/providers/redis/redis.provider.js"; +import { Subscriber } from "@liexp/backend/lib/providers/redis/Subscriber.js"; import { PostToSocialPlatformsPubSub } from "@liexp/backend/lib/pubsub/postToSocialPlatforms.pubSub.js"; import { fp } from "@liexp/core/lib/fp/index.js"; import { pipe } from "fp-ts/lib/function.js"; +import { type RTE } from "../../../types.js"; import { postToSocialPlatforms } from "#flows/social-post/postToPlatforms.flow.js"; export const PostToSocialPlatformsSubscriber = Subscriber( PostToSocialPlatformsPubSub, - (payload) => + (payload): RTE => pipe( postToSocialPlatforms(payload), fp.RTE.map(() => undefined), diff --git a/services/worker/test/WorkerTest.ts b/services/worker/test/WorkerTest.ts index 6fd311c54f..a8e26ee4e1 100644 --- a/services/worker/test/WorkerTest.ts +++ b/services/worker/test/WorkerTest.ts @@ -29,7 +29,7 @@ import * as TE from "fp-ts/lib/TaskEither.js"; import { pipe } from "fp-ts/lib/function.js"; import path from "path"; import { vi } from "vitest"; -import { mocks, type AppMocks } from "@liexp/backend/lib/test/mocks.js"; +import { mocks, type DepsMocks } from "@liexp/backend/lib/test/mocks.js"; import { WorkerContext } from "#context/context.js"; import { getDataSource, @@ -53,7 +53,7 @@ vi.mock("node-telegram-bot-api"); export interface WorkerTest { ctx: WorkerContext; - mocks: AppMocks; + mocks: DepsMocks; utils: { e2eAfterAll: () => Promise; };