Skip to content

Commit

Permalink
Feature/#1289 log optimisation (#395)
Browse files Browse the repository at this point in the history
* #1289 Log optimisation - added opt-out for log calls

* deps and version updated

* deps updated

* CircleCI check

* CircleCI check 2

* cleanup after merges

* Coverity fixes
  • Loading branch information
yosheeck authored Apr 14, 2020
1 parent 221fe7f commit 12c8866
Show file tree
Hide file tree
Showing 15 changed files with 89 additions and 108 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ jobs:
--set ml-api-adapter.ml-api-adapter-service.image.repository=$DOCKER_ORG/$CIRCLE_PROJECT_REPONAME \
--set ml-api-adapter.ml-api-adapter-service.image.tag=$CIRCLE_TAG \
--set ml-api-adapter.ml-api-adapter-handler-notification.image.repository=$DOCKER_ORG/$CIRCLE_PROJECT_REPONAME \
--set ml-api-adapter.ml-api-adapter-handler-notification.image.tag=$CIRCLE_TAG
--set ml-api-adapter.ml-api-adapter-handler-notification.image.tag=$CIRCLE_TAG
##
# Workflows
Expand Down
47 changes: 5 additions & 42 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

18 changes: 9 additions & 9 deletions src/api/transfers/handler.js
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ const create = async function (request, h) {
span.setTracestateTags({ timeApiPrepare: `${Date.now()}` })
try {
span.setTags(getTransferSpanTags(request, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.PREPARE))
Logger.debug('create::payload(%s)', JSON.stringify(request.payload))
Logger.debug('create::headers(%s)', JSON.stringify(request.headers))
Logger.isDebugEnabled && Logger.debug('create::payload(%s)', JSON.stringify(request.payload))
Logger.isDebugEnabled && Logger.debug('create::headers(%s)', JSON.stringify(request.headers))
await span.audit({
headers: request.headers,
dataUri: request.dataUri,
Expand Down Expand Up @@ -104,9 +104,9 @@ const fulfilTransfer = async function (request, h) {
try {
span.setTags(getTransferSpanTags(request, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.FULFIL))
Validator.fulfilTransfer(request)
Logger.debug('fulfilTransfer::payload(%s)', JSON.stringify(request.payload))
Logger.debug('fulfilTransfer::headers(%s)', JSON.stringify(request.headers))
Logger.debug('fulfilTransfer::id(%s)', request.params.id)
Logger.isDebugEnabled && Logger.debug('fulfilTransfer::payload(%s)', JSON.stringify(request.payload))
Logger.isDebugEnabled && Logger.debug('fulfilTransfer::headers(%s)', JSON.stringify(request.headers))
Logger.isDebugEnabled && Logger.debug('fulfilTransfer::id(%s)', request.params.id)
await span.audit({
headers: request.headers,
dataUri: request.dataUri,
Expand Down Expand Up @@ -146,7 +146,7 @@ const getTransferById = async function (request, h) {
const span = request.span
try {
span.setTags(getTransferSpanTags(request, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.GET))
Logger.info(`getById::id(${request.params.id})`)
Logger.isInfoEnabled && Logger.info(`getById::id(${request.params.id})`)
await span.audit({
headers: request.headers,
params: request.params
Expand Down Expand Up @@ -183,9 +183,9 @@ const fulfilTransferError = async function (request, h) {
const span = request.span
try {
span.setTags(getTransferSpanTags(request, Enum.Events.Event.Type.TRANSFER, Enum.Events.Event.Action.ABORT))
Logger.debug('fulfilTransferError::payload(%s)', JSON.stringify(request.payload))
Logger.debug('fulfilTransferError::headers(%s)', JSON.stringify(request.headers))
Logger.debug('fulfilTransfer::id(%s)', request.params.id)
Logger.isDebugEnabled && Logger.debug('fulfilTransferError::payload(%s)', JSON.stringify(request.payload))
Logger.isDebugEnabled && Logger.debug('fulfilTransferError::headers(%s)', JSON.stringify(request.headers))
Logger.isDebugEnabled && Logger.debug('fulfilTransfer::id(%s)', request.params.id)
await span.audit({
headers: request.headers,
dataUri: request.dataUri,
Expand Down
6 changes: 3 additions & 3 deletions src/domain/participant/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,9 @@ const getEndpoint = async (fsp, endpointType, transferId = null, span = null) =>
getEndpointSpan = span.getChild(`${span.getContext().service}_getEndpoint`)
getEndpointSpan.setTags({ endpointType, fsp })
}
Logger.debug(`domain::participant::getEndpoint::fsp - ${fsp}`)
Logger.debug(`domain::participant::getEndpoint::endpointType - ${endpointType}`)
Logger.debug(`domain::participant::getEndpoint::transferId - ${transferId}`)
Logger.isDebugEnabled && Logger.debug(`domain::participant::getEndpoint::fsp - ${fsp}`)
Logger.isDebugEnabled && Logger.debug(`domain::participant::getEndpoint::endpointType - ${endpointType}`)
Logger.isDebugEnabled && Logger.debug(`domain::participant::getEndpoint::transferId - ${transferId}`)

try {
const url = await Endpoints.getEndpoint(Config.ENDPOINT_SOURCE_URL, fsp, endpointType, { transferId })
Expand Down
32 changes: 16 additions & 16 deletions src/domain/transfer/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -51,17 +51,17 @@ const generalEnum = require('@mojaloop/central-services-shared').Enum
* @returns {boolean} Returns true on successful publishing of message to kafka, throws error on failures
*/
const prepare = async (headers, dataUri, payload, span) => {
Logger.debug('domain::transfer::prepare::start(%s, %s)', headers, payload)
Logger.isDebugEnabled && Logger.debug('domain::transfer::prepare::start(%s, %s)', headers, payload)
try {
const state = StreamingProtocol.createEventState(generalEnum.Events.EventStatus.SUCCESS.status, generalEnum.Events.EventStatus.SUCCESS.code, generalEnum.Events.EventStatus.SUCCESS.description)
const event = StreamingProtocol.createEventMetadata(generalEnum.Events.Event.Type.PREPARE, generalEnum.Events.Event.Type.PREPARE, state)
const metadata = StreamingProtocol.createMetadata(payload.transferId, event)
let messageProtocol = StreamingProtocol.createMessageFromRequest(payload.transferId, { headers, dataUri, params: { id: payload.transferId } }, payload.payeeFsp, payload.payerFsp, metadata)
const topicConfig = KafkaUtil.createGeneralTopicConf(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, generalEnum.Events.Event.Action.TRANSFER, generalEnum.Events.Event.Action.PREPARE)
const kafkaConfig = KafkaUtil.getKafkaConfig(Config.KAFKA_CONFIG, generalEnum.Kafka.Config.PRODUCER, generalEnum.Events.Event.Action.TRANSFER.toUpperCase(), generalEnum.Events.Event.Action.PREPARE.toUpperCase())
Logger.debug(`domain::transfer::prepare::messageProtocol - ${messageProtocol}`)
Logger.debug(`domain::transfer::prepare::topicConfig - ${topicConfig}`)
Logger.debug(`domain::transfer::prepare::kafkaConfig - ${kafkaConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::prepare::messageProtocol - ${messageProtocol}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::prepare::topicConfig - ${topicConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::prepare::kafkaConfig - ${kafkaConfig}`)
// TODO: re-enable once we are able to configure the log-level
// await span.debug({
// messageProtocol,
Expand Down Expand Up @@ -93,7 +93,7 @@ const prepare = async (headers, dataUri, payload, span) => {
* @returns {boolean} Returns true on successful publishing of message to kafka, throws error on failures
*/
const fulfil = async (headers, dataUri, payload, params, span) => {
Logger.debug('domain::transfer::fulfil::start(%s, %s, %s)', params.id, headers, payload)
Logger.isDebugEnabled && Logger.debug('domain::transfer::fulfil::start(%s, %s, %s)', params.id, headers, payload)
try {
const action = payload.transferState === generalEnum.Transfers.TransferState.ABORTED ? generalEnum.Events.Event.Action.REJECT : generalEnum.Events.Event.Action.COMMIT
const state = StreamingProtocol.createEventState(generalEnum.Events.EventStatus.SUCCESS.status, generalEnum.Events.EventStatus.SUCCESS.code, generalEnum.Events.EventStatus.SUCCESS.description)
Expand All @@ -102,9 +102,9 @@ const fulfil = async (headers, dataUri, payload, params, span) => {
let messageProtocol = StreamingProtocol.createMessageFromRequest(params.id, { headers, dataUri, params }, headers[generalEnum.Http.Headers.FSPIOP.DESTINATION], headers[generalEnum.Http.Headers.FSPIOP.SOURCE], metadata)
const topicConfig = KafkaUtil.createGeneralTopicConf(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, generalEnum.Events.Event.Action.TRANSFER, generalEnum.Events.Event.Action.FULFIL)
const kafkaConfig = KafkaUtil.getKafkaConfig(Config.KAFKA_CONFIG, generalEnum.Kafka.Config.PRODUCER, generalEnum.Events.Event.Action.TRANSFER.toUpperCase(), generalEnum.Events.Event.Action.FULFIL.toUpperCase())
Logger.debug(`domain::transfer::fulfil::messageProtocol - ${messageProtocol}`)
Logger.debug(`domain::transfer::fulfil::topicConfig - ${topicConfig}`)
Logger.debug(`domain::transfer::fulfil::kafkaConfig - ${kafkaConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::fulfil::messageProtocol - ${messageProtocol}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::fulfil::topicConfig - ${topicConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::fulfil::kafkaConfig - ${kafkaConfig}`)
await span.debug({
messageProtocol,
topicName: topicConfig.topicName,
Expand Down Expand Up @@ -133,17 +133,17 @@ const fulfil = async (headers, dataUri, payload, params, span) => {
* @returns {boolean} Returns true on successful publishing of message to kafka, throws error on failures
*/
const getTransferById = async (headers, params, span) => {
Logger.info('domain::transfer::transferById::start(%s, %s, %s)', params.id, headers)
Logger.isInfoEnabled && Logger.info('domain::transfer::transferById::start(%s, %s, %s)', params.id, headers)
try {
const state = StreamingProtocol.createEventState(generalEnum.Events.EventStatus.SUCCESS.status, generalEnum.Events.EventStatus.SUCCESS.code, generalEnum.Events.EventStatus.SUCCESS.description)
const event = StreamingProtocol.createEventMetadata(generalEnum.Events.Event.Type.GET, generalEnum.Events.Event.Type.GET, state)
const metadata = StreamingProtocol.createMetadata(params.id, event)
let messageProtocol = StreamingProtocol.createMessageFromRequest(params.id, { headers, dataUri: undefined, params }, headers[generalEnum.Http.Headers.FSPIOP.DESTINATION], headers[generalEnum.Http.Headers.FSPIOP.SOURCE], metadata)
const topicConfig = KafkaUtil.createGeneralTopicConf(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, generalEnum.Events.Event.Action.TRANSFER, generalEnum.Events.Event.Action.GET)
const kafkaConfig = KafkaUtil.getKafkaConfig(Config.KAFKA_CONFIG, generalEnum.Kafka.Config.PRODUCER, generalEnum.Events.Event.Action.TRANSFER.toUpperCase(), generalEnum.Events.Event.Action.GET.toUpperCase())
Logger.info(`domain::transfer::get::messageProtocol - ${messageProtocol}`)
Logger.info(`domain::transfer::get::topicConfig - ${topicConfig}`)
Logger.info(`domain::transfer::get::kafkaConfig - ${kafkaConfig}`)
Logger.isInfoEnabled && Logger.info(`domain::transfer::get::messageProtocol - ${messageProtocol}`)
Logger.isInfoEnabled && Logger.info(`domain::transfer::get::topicConfig - ${topicConfig}`)
Logger.isInfoEnabled && Logger.info(`domain::transfer::get::kafkaConfig - ${kafkaConfig}`)
// TODO: re-enable once we are able to configure the log-level
// await span.debug({
// messageProtocol,
Expand Down Expand Up @@ -175,17 +175,17 @@ const getTransferById = async (headers, params, span) => {
* @returns {boolean} Returns true on successful publishing of message to kafka, throws error on failures
*/
const transferError = async (headers, dataUri, payload, params, span) => {
Logger.debug('domain::transfer::abort::start(%s, %s, %s)', params.id, headers, payload)
Logger.isDebugEnabled && Logger.debug('domain::transfer::abort::start(%s, %s, %s)', params.id, headers, payload)
try {
const state = StreamingProtocol.createEventState(generalEnum.Events.EventStatus.SUCCESS.status, generalEnum.Events.EventStatus.SUCCESS.code, generalEnum.Events.EventStatus.SUCCESS.description)
const event = StreamingProtocol.createEventMetadata(generalEnum.Events.Event.Type.FULFIL, generalEnum.Events.Event.Action.ABORT, state)
const metadata = StreamingProtocol.createMetadata(params.id, event)
let messageProtocol = StreamingProtocol.createMessageFromRequest(params.id, { headers, dataUri, params }, headers[generalEnum.Http.Headers.FSPIOP.DESTINATION], headers[generalEnum.Http.Headers.FSPIOP.SOURCE], metadata)
const topicConfig = KafkaUtil.createGeneralTopicConf(Config.KAFKA_CONFIG.TOPIC_TEMPLATES.GENERAL_TOPIC_TEMPLATE.TEMPLATE, generalEnum.Events.Event.Action.TRANSFER, generalEnum.Events.Event.Action.FULFIL)
const kafkaConfig = KafkaUtil.getKafkaConfig(Config.KAFKA_CONFIG, generalEnum.Kafka.Config.PRODUCER, generalEnum.Events.Event.Action.TRANSFER.toUpperCase(), generalEnum.Events.Event.Action.FULFIL.toUpperCase())
Logger.debug(`domain::transfer::abort::messageProtocol - ${messageProtocol}`)
Logger.debug(`domain::transfer::abort::topicConfig - ${topicConfig}`)
Logger.debug(`domain::transfer::abort::kafkaConfig - ${kafkaConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::abort::messageProtocol - ${messageProtocol}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::abort::topicConfig - ${topicConfig}`)
Logger.isDebugEnabled && Logger.debug(`domain::transfer::abort::kafkaConfig - ${kafkaConfig}`)
// TODO: re-enable once we are able to configure the log-level
// await span.debug({
// messageProtocol,
Expand Down
2 changes: 1 addition & 1 deletion src/handlers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ Program.command('handler') // sub-command name, coffeeType = type, required
.action(async (args) => {
const handlerList = []
if (args.notification && typeof args.notification === 'boolean') {
Logger.debug('CLI: Executing --notification')
Logger.isDebugEnabled && Logger.debug('CLI: Executing --notification')
const handler = {
type: 'notification',
enabled: true
Expand Down
Loading

0 comments on commit 12c8866

Please sign in to comment.