// loggers.js
const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')

/* global WIKI */

/**
 * Logger model
 */
module.exports = class Logger extends Model {
  static get tableName() { return 'loggers' }
15
  static get idColumn() { return 'key' }
16 17 18 19 20 21 22 23 24

  static get jsonSchema () {
    return {
      type: 'object',
      required: ['key', 'isEnabled'],

      properties: {
        key: {type: 'string'},
        isEnabled: {type: 'boolean'},
25
        level: {type: 'string'}
26 27 28 29
      }
    }
  }

30 31 32 33
  static get jsonAttributes() {
    return ['config']
  }

34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113
  static async getLoggers() {
    return WIKI.models.loggers.query()
  }

  static async refreshLoggersFromDisk() {
    let trx
    try {
      const dbLoggers = await WIKI.models.loggers.query()

      // -> Fetch definitions from disk
      const loggersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/logging'))
      let diskLoggers = []
      for (let dir of loggersDirs) {
        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/logging', dir, 'definition.yml'), 'utf8')
        diskLoggers.push(yaml.safeLoad(def))
      }
      WIKI.data.loggers = diskLoggers.map(logger => ({
        ...logger,
        props: commonHelper.parseModuleProps(logger.props)
      }))

      // -> Insert new loggers
      let newLoggers = []
      for (let logger of WIKI.data.loggers) {
        if (!_.some(dbLoggers, ['key', logger.key])) {
          newLoggers.push({
            key: logger.key,
            isEnabled: (logger.key === 'console'),
            level: logger.defaultLevel,
            config: _.transform(logger.props, (result, value, key) => {
              _.set(result, key, value.default)
              return result
            }, {})
          })
        } else {
          const loggerConfig = _.get(_.find(dbLoggers, ['key', logger.key]), 'config', {})
          await WIKI.models.loggers.query().patch({
            config: _.transform(logger.props, (result, value, key) => {
              if (!_.has(result, key)) {
                _.set(result, key, value.default)
              }
              return result
            }, loggerConfig)
          }).where('key', logger.key)
        }
      }
      if (newLoggers.length > 0) {
        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
        for (let logger of newLoggers) {
          await WIKI.models.loggers.query(trx).insert(logger)
        }
        await trx.commit()
        WIKI.logger.info(`Loaded ${newLoggers.length} new loggers: [ OK ]`)
      } else {
        WIKI.logger.info(`No new loggers found: [ SKIPPED ]`)
      }
    } catch (err) {
      WIKI.logger.error(`Failed to scan or load new loggers: [ FAILED ]`)
      WIKI.logger.error(err)
      if (trx) {
        trx.rollback()
      }
    }
  }

  static async pageEvent({ event, page }) {
    const loggers = await WIKI.models.storage.query().where('isEnabled', true)
    if (loggers && loggers.length > 0) {
      _.forEach(loggers, logger => {
        WIKI.queue.job.syncStorage.add({
          event,
          logger,
          page
        }, {
          removeOnComplete: true
        })
      })
    }
  }
}