Commit cd3f88ba authored by Nick

refactor: removed redis + new scheduler engine

parent e90873e1
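
In short: the Bull job queues backed by Redis are replaced by a lightweight in-process scheduler (server/core/scheduler.js plus the new server/core/job.js), job timings move from cron expressions to ISO 8601 durations, Redis caching is swapped for an in-process node-cache, Redis pub/sub notifications become direct calls, and heavy jobs can still run out of process by forking server/core/worker.js. The recurring pattern throughout the diff is sketched below, condensed from the pages model further down (a hedged sketch, assuming a booted WIKI global):

// Before: enqueue through Bull (Redis-backed)
//   const job = await WIKI.queue.job.renderPage.add({ page, pipeline }, { removeOnComplete: true })
//   return job.finished()

// After: register with the in-process scheduler
const job = WIKI.scheduler.registerJob({
  name: 'render-page',  // resolves to server/jobs/render-page.js
  immediate: true,      // run now rather than waiting for a schedule
  worker: true          // fork server/core/worker.js as a child process
}, page.id)             // job data is now a plain scalar (the page ID)
await job.finished      // a Promise property now, not a method call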
......@@ -28,19 +28,18 @@ docker-dev-up: ## Run dockerized dev environment
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki yarn dev
docker-dev-down: ## Shutdown dockerized dev environment
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down --remove-orphans
docker-dev-rebuild: ## Rebuild dockerized dev image
rm -rf ./node_modules
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . build --no-cache --force-rm
docker-dev-clean: ## Clean DB, redis and data folders
docker-dev-clean: ## Clean DB and data folders
rm -rf ./data
[[ "${DEVDB}" == "postgres" ]] && docker-compose -f ./dev/docker-postgres/docker-compose.yml -p wiki --project-directory . exec db psql --dbname=wiki --username=wikijs --command='DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public' || true
[[ "${DEVDB}" == "mysql" || "${DEVDB}" == "mariadb" ]] && docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec db mysql -uroot -p'wikijsrocks' -e 'DROP SCHEMA IF EXISTS wiki; CREATE SCHEMA wiki;' || true
[[ "${DEVDB}" == "mssql" ]] && docker-compose -f ./dev/docker-mssql/docker-compose.yml -p wiki --project-directory . exec db /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P 'W1kiJSR0cks!' -Q 'DROP DATABASE IF EXISTS wiki; CREATE DATABASE wiki;' || true
[[ "${DEVDB}" == "sqlite" ]] && docker-compose -f ./dev/docker-sqlite/docker-compose.yml -p wiki --project-directory . exec wiki rm -rf /wiki/db.sqlite || true
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec redis redis-cli flushall
docker-dev-bash: ## Open an interactive shell inside the dockerized dev environment
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki bash
......
......@@ -15,10 +15,6 @@ query {
ramTotal
workingDirectory
nodeVersion
redisVersion
redisUsedRAM
redisTotalRAM
redisHost
}
}
}
......@@ -31,23 +31,20 @@ db:
# SQLite only:
storage: path/to/database.sqlite
# ---------------------------------------------------------------------
# Redis
# ---------------------------------------------------------------------
# Redis 3.2 or later required
redis:
host: localhost
port: 6379
db: 0
password: null
#######################################################################
# ADVANCED OPTIONS #
#######################################################################
# Do not change unless you know what you are doing!
# ---------------------------------------------------------------------
# Use X-Forwarded-For header
# ---------------------------------------------------------------------
# Enable only if Wiki.js is behind a reverse proxy (nginx, Apache, etc.)
# or a cloud proxy service such as Cloudflare.
trustProxy: false
# ---------------------------------------------------------------------
# SSL/TLS Settings
# ---------------------------------------------------------------------
# Consider using a reverse proxy (e.g. nginx) if you require more
......
......@@ -13,4 +13,5 @@ redis:
port: $(REDIS_PORT)
db: $(REDIS_DB)
password: $(REDIS_PASS)
trustProxy: $(TRUST_PROXY)
logLevel: info
......@@ -7,8 +7,4 @@ db:
user: wikijs
pass: wikijsrocks
db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info
......@@ -3,16 +3,6 @@
version: "3"
services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db:
image: mariadb:10.3
environment:
......@@ -44,7 +34,6 @@ services:
dockerfile: dev/docker-mariadb/Dockerfile
depends_on:
- db
- redis
networks:
- wikinet
ports:
......
#!/bin/sh
echo "Waiting for redis and mariadb to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "Waiting for mariadb to start up..."
bash ./dev/docker-common/wait.sh db:3306
echo "=== READY ==="
tail -f /dev/null
......@@ -7,8 +7,4 @@ db:
user: SA
pass: W1kiJSR0cks!
db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info
......@@ -3,16 +3,6 @@
version: "3"
services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db:
image: mcr.microsoft.com/mssql/server:2017-latest
environment:
......@@ -34,7 +24,6 @@ services:
dockerfile: dev/docker-mssql/Dockerfile
depends_on:
- db
- redis
networks:
- wikinet
ports:
......
#!/bin/sh
echo "Waiting for redis and mssql to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "Waiting for mssql to start up..."
bash ./dev/docker-common/wait.sh db:1433
echo "=== READY ==="
tail -f /dev/null
......@@ -7,8 +7,4 @@ db:
user: wikijs
pass: wikijsrocks
db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info
......@@ -3,16 +3,6 @@
version: "3"
services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db:
image: mysql:5.7
environment:
......@@ -44,7 +34,6 @@ services:
dockerfile: dev/docker-mysql/Dockerfile
depends_on:
- db
- redis
networks:
- wikinet
ports:
......
#!/bin/sh
echo "Waiting for redis and mysql to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "Waiting for mysql to start up..."
bash ./dev/docker-common/wait.sh db:3306
echo "=== READY ==="
tail -f /dev/null
......@@ -7,8 +7,4 @@ db:
user: wikijs
pass: wikijsrocks
db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info
......@@ -3,16 +3,6 @@
version: "3"
services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db:
image: postgres:9-alpine
environment:
......@@ -43,7 +33,6 @@ services:
dockerfile: dev/docker-postgres/Dockerfile
depends_on:
- db
- redis
networks:
- wikinet
ports:
......
#!/bin/sh
echo "Waiting for redis and postgres to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "Waiting for postgres to start up..."
bash ./dev/docker-common/wait.sh db:5432
echo "=== READY ==="
tail -f /dev/null
......@@ -3,8 +3,4 @@ bindIP: 0.0.0.0
db:
type: sqlite
storage: /wiki/db.sqlite
redis:
host: redis
port: 6379
db: 0
logLevel: info
......@@ -3,22 +3,10 @@
version: "3"
services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
wiki:
build:
context: .
dockerfile: dev/docker-sqlite/Dockerfile
depends_on:
- redis
networks:
- wikinet
ports:
......@@ -26,7 +14,6 @@ services:
volumes:
- .:/wiki
- /wiki/node_modules
command: ["sh", "./dev/docker-sqlite/init.sh"]
networks:
wikinet:
......
#!/bin/sh
echo "Waiting for redis to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "=== READY ==="
tail -f /dev/null
version: "3"
services:
redis:
image: redis:4-alpine
logging:
driver: "none"
networks:
- wikinet
db:
image: postgres:9-alpine
environment:
......
const { Client } = require('pg')
const fs = require('fs')
const path = require('path')
const yaml = require('js-yaml')
let config = {}
try {
  config = yaml.safeLoad(
    fs.readFileSync(path.join(process.cwd(), 'dev/docker/config.yml'), 'utf8')
  )
} catch (err) {
console.error(err.message)
process.exit(1)
}
const client = new Client({
user: config.db.username,
host: config.db.host,
database: config.db.database,
password: config.db.password,
port: config.db.port,
})
async function main () {
await client.connect()
await client.query('DROP SCHEMA public CASCADE;')
await client.query('CREATE SCHEMA public;')
await client.end()
console.info('Success.')
}
main()
......@@ -48,14 +48,11 @@
"bcryptjs-then": "1.0.1",
"bluebird": "3.5.3",
"body-parser": "1.18.3",
"bull": "3.6.0",
"chalk": "2.4.2",
"cheerio": "1.0.0-rc.2",
"child-process-promise": "2.2.1",
"chokidar": "2.0.4",
"clean-css": "4.2.1",
"compression": "1.7.3",
"connect-redis": "3.4.0",
"cookie-parser": "1.4.3",
"cors": "2.8.5",
"custom-error-instance": "2.1.1",
......@@ -63,14 +60,10 @@
"diff": "4.0.1",
"diff2html": "2.7.0",
"dotize": "^0.3.0",
"execa": "1.0.0",
"express": "4.16.4",
"express-brute": "1.0.1",
"express-brute-redis": "0.0.1",
"express-session": "1.15.6",
"file-type": "10.7.1",
"filesize": "4.0.0",
"follow-redirects": "1.6.1",
"fs-extra": "7.0.1",
"getos": "3.1.1",
"graphql": "14.1.1",
......@@ -83,7 +76,6 @@
"i18next-localstorage-cache": "1.1.1",
"i18next-node-fs-backend": "2.1.1",
"image-size": "0.7.1",
"ioredis": "4.6.2",
"js-base64": "2.5.1",
"js-binary": "1.2.0",
"js-yaml": "3.12.1",
......@@ -117,7 +109,6 @@
"node-2fa": "1.1.2",
"node-cache": "4.2.0",
"nodemailer": "5.1.1",
"oauth2orize": "1.11.0",
"objection": "1.5.3",
"ora": "3.0.0",
"passport": "0.4.0",
......@@ -146,7 +137,6 @@
"pug": "2.0.3",
"qr-image": "3.2.0",
"raven": "2.6.4",
"read-chunk": "3.0.0",
"remove-markdown": "0.3.0",
"request": "2.88.0",
"request-promise": "4.2.2",
......
......@@ -15,11 +15,6 @@ defaults:
pass: wikijsrocks
db: wiki
storage: ./db.sqlite
redis:
host: localhost
port: 6379
db: 0
password: null
ssl:
enabled: false
bindIP: 0.0.0.0
......@@ -53,30 +48,15 @@ localeNamespaces:
- auth
- common
jobs:
fetchGraphLocale:
onInit: false
cron: false
concurrency: 0
purgeUploads:
onInit: true
cron: '*/15 * * * *'
concurrency: 0
renderPage:
onInit: false
cron: false
concurrency: 1
schedule: PT15M
syncGraphLocales:
onInit: true
cron: '0 0 * * *'
concurrency: 0
schedule: P1D
syncGraphUpdates:
onInit: true
cron: '0 0 * * *'
concurrency: 0
syncStorage:
onInit: false
cron: false
concurrency: 1
schedule: P1D
groups:
defaultPermissions:
- 'manage:pages'
......
const NodeCache = require('node-cache')
module.exports = {
init() {
return new NodeCache()
}
}
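
For context, node-cache is an in-process store with synchronous get/set (TTL in seconds), which is why the JSON.parse/JSON.stringify steps disappear from callers below and why awaiting its return values is harmless. A minimal standalone sketch, with the key and TTL mirroring the navigation model later in this diff:

const NodeCache = require('node-cache')
const cache = new NodeCache()

cache.set('nav:sidebar', { items: [] }, 300) // expires after 300 seconds
const tree = cache.get('nav:sidebar')        // the object itself, or undefined on a miss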
const moment = require('moment')
const childProcess = require('child_process')
module.exports = class Job {
constructor({
name,
immediate = false,
schedule = 'P1D',
repeat = false,
worker = false
}) {
this.finished = Promise.resolve()
this.name = name
this.immediate = immediate
this.schedule = moment.duration(schedule)
this.repeat = repeat
this.worker = worker
}
/**
* Start Job
*
* @param {Object} data Job Data
*/
start(data) {
if (this.immediate) {
this.invoke(data)
} else {
this.queue(data)
}
}
/**
* Queue the next job run according to the wait duration
*
* @param {Object} data Job Data
*/
queue(data) {
this.timeout = setTimeout(this.invoke.bind(this), this.schedule.asMilliseconds(), data)
}
/**
* Run the actual job
*
* @param {Object} data Job Data
*/
async invoke(data) {
try {
if (this.worker) {
const proc = childProcess.fork(`server/core/worker.js`, [
`--job=${this.name}`,
`--data=${data}`
], {
cwd: WIKI.ROOTPATH
})
this.finished = new Promise((resolve, reject) => {
proc.on('exit', (code, signal) => {
if (code === 0) {
resolve()
} else {
reject(signal)
}
proc.kill()
})
})
} else {
this.finished = require(`../jobs/${this.name}`)(data)
}
await this.finished
} catch (err) {
WIKI.logger.warn(err)
}
if (this.repeat) {
this.queue(data)
}
}
/**
* Stop any future job invocation from occurring
*/
stop() {
clearTimeout(this.timeout)
}
}
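
The schedule values used here and in config.yml above (PT15M, P1D, PT5M) are ISO 8601 durations; moment.duration parses them natively, and setTimeout is then armed with the equivalent milliseconds. For example:

const moment = require('moment')

moment.duration('PT15M').asMilliseconds() // 900000   (15 minutes)
moment.duration('P1D').asMilliseconds()   // 86400000 (1 day)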
......@@ -10,10 +10,10 @@ module.exports = {
WIKI.logger.info('=======================================')
WIKI.models = require('./db').init()
WIKI.redis = require('./redis').init()
WIKI.queue = require('./queue').init()
await this.preBootMaster()
await WIKI.models.onReady
await WIKI.configSvc.loadFromDb()
this.bootMaster()
},
/**
......@@ -21,11 +21,10 @@ module.exports = {
*/
async preBootMaster() {
try {
await WIKI.models.onReady
await WIKI.configSvc.loadFromDb()
await WIKI.queue.clean()
await this.initTelemetry()
WIKI.cache = require('./cache').init()
WIKI.scheduler = require('./scheduler').init()
WIKI.events = new EventEmitter()
WIKI.redisSub = require('./redis').subscribe()
} catch (err) {
WIKI.logger.error(err)
process.exit(1)
......@@ -40,7 +39,7 @@ module.exports = {
WIKI.logger.info('Starting setup wizard...')
require('../setup')()
} else {
await this.initTelemetry()
await this.preBootMaster()
await require('../master')()
this.postBootMaster()
}
......@@ -62,7 +61,7 @@ module.exports = {
await WIKI.auth.activateStrategies()
await WIKI.models.storage.initTargets()
await WIKI.queue.start()
WIKI.scheduler.start()
},
/**
* Init Telemetry
......
......@@ -27,15 +27,6 @@ module.exports = {
// Load current language + namespaces
this.refreshNamespaces(true)
// Listen for localization events
WIKI.events.on('localization', (action) => {
switch (action) {
case 'reload':
this.refreshNamespaces()
break
}
})
return this
},
/**
......
const path = require('path')
const Bull = require('bull')
const Promise = require('bluebird')
const _ = require('lodash')
/* global WIKI */
module.exports = {
job: {},
init() {
_.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
this.job[queueName] = new Bull(queueName, {
prefix: `queue`,
redis: WIKI.config.redis
})
if (queueParams.concurrency > 0) {
this.job[queueName].process(queueParams.concurrency, path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`))
} else {
this.job[queueName].process(path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`))
}
})
return this
},
start() {
_.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
if (queueParams.onInit) {
this.job[queueName].add({}, {
removeOnComplete: true
})
}
if (queueParams.cron) {
this.job[queueName].add({}, {
repeat: { cron: queueParams.cron },
removeOnComplete: true
})
}
})
},
async quit() {
for (const queueName in this.job) {
await this.job[queueName].close()
}
},
async clean() {
return Promise.each(_.keys(WIKI.data.jobs), queueName => {
return new Promise((resolve, reject) => {
let keyStream = WIKI.redis.scanStream({
match: `queue:${queueName}:*`
})
keyStream.on('data', resultKeys => {
if (resultKeys.length > 0) {
WIKI.redis.del(resultKeys)
}
})
keyStream.on('end', resolve)
})
}).then(() => {
WIKI.logger.info('Purging old queue jobs: [ OK ]')
}).return(true).catch(err => {
WIKI.logger.error(err)
})
}
}
const Redis = require('ioredis')
const { isPlainObject } = require('lodash')
/* global WIKI */
module.exports = {
init() {
if (isPlainObject(WIKI.config.redis)) {
let red = new Redis(WIKI.config.redis)
red.on('ready', () => {
WIKI.logger.info('Redis connection: [ OK ]')
})
red.on('error', () => {
WIKI.logger.error('Failed to connect to Redis instance!')
process.exit(1)
})
return red
} else {
WIKI.logger.error('Invalid Redis configuration!')
process.exit(1)
}
},
subscribe() {
let red = this.init()
red.on('message', (channel, msg) => {
WIKI.events.emit(channel, msg)
})
red.subscribe('localization', 'updates', (err, count) => {
if (err) {
WIKI.logger.error(err)
process.exit(1)
}
})
return red
}
}
const Job = require('./job')
const _ = require('lodash')
/* global WIKI */
module.exports = {
jobs: [],
init() {
return this
},
start() {
_.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
this.registerJob({
name: _.kebabCase(queueName),
immediate: queueParams.onInit,
schedule: queueParams.schedule,
repeat: true
})
})
},
registerJob(opts, data) {
const job = new Job(opts)
job.start(data)
if (job.repeat) {
this.jobs.push(job)
}
return job
},
stop() {
this.jobs.forEach(job => {
job.stop()
})
}
}
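
Taken together with the jobs section of config.yml above, scheduler.start() turns each entry of WIKI.data.jobs into a repeating Job. A hedged standalone illustration of that cycle for the purgeUploads entry (onInit: true, schedule: PT15M), again assuming a booted WIKI global:

const job = WIKI.scheduler.registerJob({
  name: 'purge-uploads',  // kebab-cased from purgeUploads; resolves to server/jobs/purge-uploads.js
  immediate: true,        // onInit: true, so the first run fires right away
  schedule: 'PT15M',      // then re-queued every 15 minutes via setTimeout
  repeat: true
})
// ...later, e.g. during a live reload:
job.stop()                // clearTimeout; no further invocations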
......@@ -15,15 +15,6 @@ module.exports = {
minimumNodeRequired: '10.12.0'
},
init() {
// Listen for updates events
WIKI.events.on('updates', (infoRaw) => {
try {
this.updates = JSON.parse(infoRaw)
} catch (err) {
WIKI.logger.warn('Failed to parse updates info.')
}
})
// Clear content cache
fs.emptyDir(path.join(WIKI.ROOTPATH, 'data/cache'))
......
......@@ -10,9 +10,10 @@ let WIKI = {
global.WIKI = WIKI
WIKI.configSvc.init()
// ----------------------------------------
// Init Logger
// ----------------------------------------
WIKI.logger = require('./logger').init('JOB')
const args = require('yargs').argv
;(async () => {
await require(`../jobs/${args.job}`)(args.data)
process.exit(0)
})()
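
For reference, this bootstrap lets any job run standalone in a forked child: Job.invoke above spawns it as, in effect, node server/core/worker.js --job=render-page --data=42 (values illustrative), with yargs mapping the flags to args.job and args.data.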
......@@ -11,7 +11,7 @@ module.exports = {
*/
async getMigrations() {
const absoluteDir = path.join(WIKI.SERVERPATH, 'db/migrations')
const migrationFiles = await fs.readdirAsync(absoluteDir)
const migrationFiles = await fs.readdir(absoluteDir)
return migrationFiles.sort(semver.compare).map(m => ({
file: m,
directory: absoluteDir
......
......@@ -12,9 +12,9 @@ module.exports = {
},
LocalizationQuery: {
async locales(obj, args, context, info) {
let remoteLocales = await WIKI.redis.get('locales')
let remoteLocales = await WIKI.cache.get('locales')
let localLocales = await WIKI.models.locales.query().select('code', 'isRTL', 'name', 'nativeName', 'createdAt', 'updatedAt')
remoteLocales = (remoteLocales) ? JSON.parse(remoteLocales) : localLocales
remoteLocales = remoteLocales || localLocales
return _.map(remoteLocales, rl => {
let isInstalled = _.some(localLocales, ['code', rl.code])
return {
......@@ -39,12 +39,11 @@ module.exports = {
LocalizationMutation: {
async downloadLocale(obj, args, context) {
try {
const job = await WIKI.queue.job.fetchGraphLocale.add({
locale: args.locale
}, {
timeout: 30000
})
await job.finished()
const job = await WIKI.scheduler.registerJob({
name: 'fetch-graph-locale',
immediate: true
}, args.locale)
await job.finished
return {
responseResult: graphHelper.generateSuccess('Locale downloaded successfully')
}
......
......@@ -98,18 +98,6 @@ module.exports = {
nodeVersion() {
return process.version.substr(1)
},
redisVersion() {
return WIKI.redis.serverInfo.redis_version
},
redisUsedRAM() {
return WIKI.redis.serverInfo.used_memory_human
},
redisTotalRAM() {
return _.get(WIKI.redis.serverInfo, 'total_system_memory_human', 'N/A')
},
redisHost() {
return WIKI.redis.options.host
},
async groupsTotal() {
const total = await WIKI.models.groups.query().count('* as total').first().pluck('total')
return _.toSafeInteger(total)
......
......@@ -38,10 +38,6 @@ type SystemInfo {
pagesTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:pages", "delete:pages"])
platform: String @auth(requires: ["manage:system"])
ramTotal: String @auth(requires: ["manage:system"])
redisHost: String @auth(requires: ["manage:system"])
redisTotalRAM: String @auth(requires: ["manage:system"])
redisUsedRAM: String @auth(requires: ["manage:system"])
redisVersion: String @auth(requires: ["manage:system"])
usersTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:groups", "write:groups", "manage:users", "write:users"])
workingDirectory: String @auth(requires: ["manage:system"])
}
require('../core/worker')
const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */
WIKI.redis = require('../core/redis').init()
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint...`)
module.exports = async (localeCode) => {
WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint...`)
try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint
})
......@@ -26,7 +21,7 @@ module.exports = async (job) => {
}
}`,
variables: {
code: job.data.locale
code: localeCode
}
})
const strings = _.get(respStrings, 'data.localization.strings', [])
......@@ -36,12 +31,12 @@ module.exports = async (job) => {
_.set(lcObj, row.key.replace(':', '.'), row.value)
})
const locales = await WIKI.redis.get('locales')
const locales = await WIKI.cache.get('locales')
if (locales) {
const currentLocale = _.find(JSON.parse(locales), ['code', job.data.locale]) || {}
await WIKI.models.locales.query().delete().where('code', job.data.locale)
const currentLocale = _.find(locales, ['code', localeCode]) || {}
await WIKI.models.locales.query().delete().where('code', localeCode)
await WIKI.models.locales.query().insert({
code: job.data.locale,
code: localeCode,
strings: lcObj,
isRTL: currentLocale.isRTL,
name: currentLocale.name,
......@@ -51,11 +46,11 @@ module.exports = async (job) => {
throw new Error('Failed to fetch cached locales list! Restart server to resolve this issue.')
}
await WIKI.redis.publish('localization', 'reload')
await WIKI.lang.refreshNamespaces()
WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint: [ COMPLETED ]`)
WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint: [ COMPLETED ]`)
} catch (err) {
WIKI.logger.error(`Fetching locale ${job.data.locale} from Graph endpoint: [ FAILED ]`)
WIKI.logger.error(`Fetching locale ${localeCode} from Graph endpoint: [ FAILED ]`)
WIKI.logger.error(err.message)
}
}
require('../core/worker')
/* global WIKI */
const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs-extra'))
const fs = require('fs-extra')
const moment = require('moment')
const path = require('path')
module.exports = async (job) => {
module.exports = async () => {
WIKI.logger.info('Purging orphaned upload files...')
try {
const uplTempPath = path.resolve(process.cwd(), WIKI.config.paths.data, 'uploads')
const ls = await fs.readdirAsync(uplTempPath)
const ls = await fs.readdir(uplTempPath)
const fifteenAgo = moment().subtract(15, 'minutes')
await Promise.map(ls, (f) => {
return fs.statAsync(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } })
return fs.stat(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } })
}).filter((s) => { return s.stat.isFile() }).then((arrFiles) => {
return Promise.map(arrFiles, (f) => {
if (moment(f.stat.ctime).isBefore(fifteenAgo, 'minute')) {
return fs.unlinkAsync(path.join(uplTempPath, f.filename))
return fs.unlink(path.join(uplTempPath, f.filename))
}
})
})
......
require('../core/worker')
const _ = require('lodash')
const cheerio = require('cheerio')
/* global WIKI */
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
WIKI.logger.info(`Rendering page ${job.data.page.path}...`)
module.exports = async (pageId) => {
WIKI.logger.info(`Rendering page ID ${pageId}...`)
try {
let output = job.data.page.content
for (let core of job.data.pipeline) {
WIKI.models = require('../core/db').init()
const page = await WIKI.models.pages.getPageFromDb(pageId)
if (!page) {
throw new Error('Invalid Page Id')
}
await WIKI.models.renderers.fetchDefinitions()
const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType)
let output = page.content
for (let core of pipeline) {
const renderer = require(`../modules/rendering/${_.kebabCase(core.key)}/renderer.js`)
output = await renderer.render.call({
config: core.config,
children: core.children,
page: job.data.page,
page: page,
input: output
})
}
......@@ -61,18 +67,20 @@ module.exports = async (job) => {
render: output,
toc: JSON.stringify(toc.root)
})
.where('id', job.data.page.id)
.where('id', pageId)
// Save to cache
await WIKI.models.pages.savePageToCache({
...job.data.page,
...page,
render: output,
toc: JSON.stringify(toc.root)
})
WIKI.logger.info(`Rendering page ${job.data.page.path}: [ COMPLETED ]`)
await WIKI.models.knex.destroy()
WIKI.logger.info(`Rendering page ID ${pageId}: [ COMPLETED ]`)
} catch (err) {
WIKI.logger.error(`Rendering page ${job.data.page.path}: [ FAILED ]`)
WIKI.logger.error(`Rendering page ID ${pageId}: [ FAILED ]`)
WIKI.logger.error(err.message)
}
}
require('../core/worker')
const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */
WIKI.redis = require('../core/redis').init()
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
module.exports = async () => {
WIKI.logger.info('Syncing locales with Graph endpoint...')
try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint
})
......@@ -33,7 +28,7 @@ module.exports = async (job) => {
}`
})
const locales = _.sortBy(_.get(respList, 'data.localization.locales', []), 'name').map(lc => ({...lc, isInstalled: (lc.code === 'en')}))
WIKI.redis.set('locales', JSON.stringify(locales))
WIKI.cache.set('locales', locales)
const currentLocale = _.find(locales, ['code', WIKI.config.lang.code])
// -> Download locale strings
......@@ -68,7 +63,7 @@ module.exports = async (job) => {
}).where('code', WIKI.config.lang.code)
}
await WIKI.redis.publish('localization', 'reload')
await WIKI.lang.refreshNamespaces()
WIKI.logger.info('Syncing locales with Graph endpoint: [ COMPLETED ]')
} catch (err) {
......
require('../core/worker')
const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */
WIKI.redis = require('../core/redis').init()
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
module.exports = async () => {
WIKI.logger.info(`Fetching latest updates from Graph endpoint...`)
try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint
})
......@@ -33,9 +28,10 @@ module.exports = async (job) => {
version: WIKI.version
}
})
const info = _.get(resp, 'data.releases.checkForUpdates', {})
await WIKI.redis.publish('updates', JSON.stringify(info))
const info = _.get(resp, 'data.releases.checkForUpdates', false)
if (info) {
WIKI.system.updates = info
}
WIKI.logger.info(`Fetching latest updates from Graph endpoint: [ COMPLETED ]`)
} catch (err) {
......
......@@ -46,7 +46,9 @@ module.exports = async () => {
app.use(mw.security)
app.use(cors(WIKI.config.cors))
app.options('*', cors(WIKI.config.cors))
app.enable('trust proxy')
if (WIKI.config.trustProxy) {
app.enable('trust proxy')
}
// ----------------------------------------
// Public Assets
......
......@@ -23,15 +23,15 @@ module.exports = class Navigation extends Model {
static async getTree({ cache = false } = {}) {
if (cache) {
const navTreeCached = await WIKI.redis.get('nav:sidebar')
const navTreeCached = await WIKI.cache.get('nav:sidebar')
if (navTreeCached) {
return JSON.parse(navTreeCached)
return navTreeCached
}
}
const navTree = await WIKI.models.navigation.query().findOne('key', 'site')
if (navTree) {
if (cache) {
await WIKI.redis.set('nav:sidebar', JSON.stringify(navTree.config), 'EX', 300)
await WIKI.cache.set('nav:sidebar', navTree.config, 300)
}
return navTree.config
} else {
......
......@@ -217,15 +217,12 @@ module.exports = class Page extends Model {
}
static async renderPage(page) {
const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType)
const renderJob = await WIKI.queue.job.renderPage.add({
page,
pipeline
}, {
removeOnComplete: true,
removeOnFail: true
})
return renderJob.finished()
const renderJob = await WIKI.scheduler.registerJob({
name: 'render-page',
immediate: true,
worker: true
}, page.id)
return renderJob.finished
}
static async getPage(opts) {
......@@ -240,6 +237,7 @@ module.exports = class Page extends Model {
}
static async getPageFromDb(opts) {
const queryModeID = _.isNumber(opts)
return WIKI.models.pages.query()
.column([
'pages.*',
......@@ -252,11 +250,14 @@ module.exports = class Page extends Model {
])
.joinRelation('author')
.joinRelation('creator')
.where({
.where(queryModeID ? {
'pages.id': opts
} : {
'pages.path': opts.path,
'pages.localeCode': opts.locale
})
.andWhere(builder => {
if (queryModeID) return
builder.where({
'pages.isPublished': true
}).orWhere({
......@@ -265,6 +266,7 @@ module.exports = class Page extends Model {
})
})
.andWhere(builder => {
if (queryModeID) return
if (opts.isPrivate) {
builder.where({ 'pages.isPrivate': true, 'pages.privateNS': opts.privateNS })
} else {
......
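
getPageFromDb now accepts either a bare numeric ID (as passed by the render-page worker) or the original options object; the ID path skips the published/private filters. A hedged usage sketch (the literal values are illustrative only):

// By numeric ID, as the render-page job does:
const byId = await WIKI.models.pages.getPageFromDb(42)

// By path + locale, the pre-existing shape (other fields such as
// isPrivate/privateNS are elided here):
const byPath = await WIKI.models.pages.getPageFromDb({ path: 'home', locale: 'en' })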
......@@ -35,22 +35,26 @@ module.exports = class Renderer extends Model {
return WIKI.models.renderers.query()
}
static async fetchDefinitions() {
const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
let diskRenderers = []
for (let dir of rendererDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
diskRenderers.push(yaml.safeLoad(def))
}
WIKI.data.renderers = diskRenderers.map(renderer => ({
...renderer,
props: commonHelper.parseModuleProps(renderer.props)
}))
}
static async refreshRenderersFromDisk() {
let trx
try {
const dbRenderers = await WIKI.models.renderers.query()
// -> Fetch definitions from disk
const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
let diskRenderers = []
for (let dir of rendererDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
diskRenderers.push(yaml.safeLoad(def))
}
WIKI.data.renderers = diskRenderers.map(renderer => ({
...renderer,
props: commonHelper.parseModuleProps(renderer.props)
}))
await WIKI.models.renderers.fetchDefinitions()
// -> Insert new Renderers
let newRenderers = []
......
......@@ -107,6 +107,11 @@ module.exports = class Storage extends Model {
target.fn.config = target.config
target.fn.mode = target.mode
await target.fn.init()
// if (target.schedule) {
// WIKI.scheduler.registerJob({
// name:
// }, target.fn.sync)
// }
}
} catch (err) {
WIKI.logger.warn(err)
......
......@@ -294,7 +294,7 @@ module.exports = class User extends Model {
static async loginTFA(opts, context) {
if (opts.securityCode.length === 6 && opts.loginToken.length === 64) {
let result = await WIKI.redis.get(`tfa:${opts.loginToken}`)
let result = null // await WIKI.redis.get(`tfa:${opts.loginToken}`)
if (result) {
let userId = _.toSafeInteger(result)
if (userId && userId > 0) {
......
......@@ -8,6 +8,7 @@ isAvailable: true
supportedModes:
- push
defaultMode: push
schedule: false
props:
path:
type: String
......
......@@ -10,6 +10,7 @@ supportedModes:
- push
- pull
defaultMode: sync
schedule: PT5M
props:
authType:
type: String
......
......@@ -117,13 +117,12 @@ const init = {
}
},
async reload() {
console.warn(chalk.yellow('--- Stopping scheduled jobs...'))
if (global.WIKI.scheduler) {
global.WIKI.scheduler.stop()
}
console.warn(chalk.yellow('--- Closing DB connections...'))
await global.WIKI.models.knex.destroy()
console.warn(chalk.yellow('--- Closing Redis connections...'))
await global.WIKI.redis.quit()
await global.WIKI.redisSub.quit()
console.warn(chalk.yellow('--- Closing Queue connections...'))
await global.WIKI.queue.quit()
console.warn(chalk.yellow('--- Closing Server connections...'))
global.WIKI.server.destroy(() => {
console.warn(chalk.yellow('--- Purging node modules cache...'))
......
This diff was suppressed by a .gitattributes entry.