Commit cd3f88ba authored by Nick's avatar Nick

refactor: removed redis + new scheduler engine

parent e90873e1
...@@ -28,19 +28,18 @@ docker-dev-up: ## Run dockerized dev environment ...@@ -28,19 +28,18 @@ docker-dev-up: ## Run dockerized dev environment
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki yarn dev docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki yarn dev
docker-dev-down: ## Shutdown dockerized dev environment docker-dev-down: ## Shutdown dockerized dev environment
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . down --remove-orphans
docker-dev-rebuild: ## Rebuild dockerized dev image docker-dev-rebuild: ## Rebuild dockerized dev image
rm -rf ./node_modules rm -rf ./node_modules
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . build --no-cache --force-rm docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . build --no-cache --force-rm
docker-dev-clean: ## Clean DB, redis and data folders docker-dev-clean: ## Clean DB and data folders
rm -rf ./data rm -rf ./data
[[ "${DEVDB}" == "postgres" ]] && docker-compose -f ./dev/docker-postgres/docker-compose.yml -p wiki --project-directory . exec db psql --dbname=wiki --username=wikijs --command='DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public' || true [[ "${DEVDB}" == "postgres" ]] && docker-compose -f ./dev/docker-postgres/docker-compose.yml -p wiki --project-directory . exec db psql --dbname=wiki --username=wikijs --command='DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public' || true
[[ "${DEVDB}" == "mysql" || "${DEVDB}" == "mariadb" ]] && docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec db mysql -uroot -p'wikijsrocks' -e 'DROP SCHEMA IF EXISTS wiki; CREATE SCHEMA wiki;' || true [[ "${DEVDB}" == "mysql" || "${DEVDB}" == "mariadb" ]] && docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec db mysql -uroot -p'wikijsrocks' -e 'DROP SCHEMA IF EXISTS wiki; CREATE SCHEMA wiki;' || true
[[ "${DEVDB}" == "mssql" ]] && docker-compose -f ./dev/docker-mssql/docker-compose.yml -p wiki --project-directory . exec db /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P 'W1kiJSR0cks!' -Q 'DROP DATABASE IF EXISTS wiki; CREATE DATABASE wiki;' || true [[ "${DEVDB}" == "mssql" ]] && docker-compose -f ./dev/docker-mssql/docker-compose.yml -p wiki --project-directory . exec db /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P 'W1kiJSR0cks!' -Q 'DROP DATABASE IF EXISTS wiki; CREATE DATABASE wiki;' || true
[[ "${DEVDB}" == "sqlite" ]] && docker-compose -f ./dev/docker-sqlite/docker-compose.yml -p wiki --project-directory . exec wiki rm -rf /wiki/db.sqlite || true [[ "${DEVDB}" == "sqlite" ]] && docker-compose -f ./dev/docker-sqlite/docker-compose.yml -p wiki --project-directory . exec wiki rm -rf /wiki/db.sqlite || true
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec redis redis-cli flushall
docker-dev-bash: ## Rebuild dockerized dev image docker-dev-bash: ## Rebuild dockerized dev image
docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki bash docker-compose -f ./dev/docker-${DEVDB}/docker-compose.yml -p wiki --project-directory . exec wiki bash
......
...@@ -15,10 +15,6 @@ query { ...@@ -15,10 +15,6 @@ query {
ramTotal ramTotal
workingDirectory workingDirectory
nodeVersion nodeVersion
redisVersion
redisUsedRAM
redisTotalRAM
redisHost
} }
} }
} }
...@@ -31,23 +31,20 @@ db: ...@@ -31,23 +31,20 @@ db:
# SQLite only: # SQLite only:
storage: path/to/database.sqlite storage: path/to/database.sqlite
# ---------------------------------------------------------------------
# Redis
# ---------------------------------------------------------------------
# Redis 3.2 or later required
redis:
host: localhost
port: 6379
db: 0
password: null
####################################################################### #######################################################################
# ADVANCED OPTIONS # # ADVANCED OPTIONS #
####################################################################### #######################################################################
# Do not change unless you know what you are doing! # Do not change unless you know what you are doing!
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
# Use X-Forwarded-For header
# ---------------------------------------------------------------------
# Enable only if Wiki.js is behind a reverse-proxy (nginx, apache, etc)
# or a cloud proxying services like Cloudflare.
trustProxy: false
# ---------------------------------------------------------------------
# SSL/TLS Settings # SSL/TLS Settings
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
# Consider using a reverse proxy (e.g. nginx) if you require more # Consider using a reverse proxy (e.g. nginx) if you require more
......
...@@ -13,4 +13,5 @@ redis: ...@@ -13,4 +13,5 @@ redis:
port: $(REDIS_PORT) port: $(REDIS_PORT)
db: $(REDIS_DB) db: $(REDIS_DB)
password: $(REDIS_PASS) password: $(REDIS_PASS)
trustProxy: $(TRUST_PROXY)
logLevel: info logLevel: info
...@@ -7,8 +7,4 @@ db: ...@@ -7,8 +7,4 @@ db:
user: wikijs user: wikijs
pass: wikijsrocks pass: wikijsrocks
db: wiki db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info logLevel: info
...@@ -3,16 +3,6 @@ ...@@ -3,16 +3,6 @@
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db: db:
image: mariadb:10.3 image: mariadb:10.3
environment: environment:
...@@ -44,7 +34,6 @@ services: ...@@ -44,7 +34,6 @@ services:
dockerfile: dev/docker-mariadb/Dockerfile dockerfile: dev/docker-mariadb/Dockerfile
depends_on: depends_on:
- db - db
- redis
networks: networks:
- wikinet - wikinet
ports: ports:
......
#!/bin/sh #!/bin/sh
echo "Waiting for redis and mariadb to start up..." echo "Waiting for mariadb to start up..."
bash ./dev/docker-common/wait.sh redis:6379
bash ./dev/docker-common/wait.sh db:3306 bash ./dev/docker-common/wait.sh db:3306
echo "=== READY ===" echo "=== READY ==="
tail -f /dev/null tail -f /dev/null
...@@ -7,8 +7,4 @@ db: ...@@ -7,8 +7,4 @@ db:
user: SA user: SA
pass: W1kiJSR0cks! pass: W1kiJSR0cks!
db: wiki db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info logLevel: info
...@@ -3,16 +3,6 @@ ...@@ -3,16 +3,6 @@
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db: db:
image: mcr.microsoft.com/mssql/server:2017-latest image: mcr.microsoft.com/mssql/server:2017-latest
environment: environment:
...@@ -34,7 +24,6 @@ services: ...@@ -34,7 +24,6 @@ services:
dockerfile: dev/docker-mssql/Dockerfile dockerfile: dev/docker-mssql/Dockerfile
depends_on: depends_on:
- db - db
- redis
networks: networks:
- wikinet - wikinet
ports: ports:
......
#!/bin/sh #!/bin/sh
echo "Waiting for redis and mssql to start up..." echo "Waiting for mssql to start up..."
bash ./dev/docker-common/wait.sh redis:6379
bash ./dev/docker-common/wait.sh db:1433 bash ./dev/docker-common/wait.sh db:1433
echo "=== READY ===" echo "=== READY ==="
tail -f /dev/null tail -f /dev/null
...@@ -7,8 +7,4 @@ db: ...@@ -7,8 +7,4 @@ db:
user: wikijs user: wikijs
pass: wikijsrocks pass: wikijsrocks
db: wiki db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info logLevel: info
...@@ -3,16 +3,6 @@ ...@@ -3,16 +3,6 @@
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db: db:
image: mysql:5.7 image: mysql:5.7
environment: environment:
...@@ -44,7 +34,6 @@ services: ...@@ -44,7 +34,6 @@ services:
dockerfile: dev/docker-mysql/Dockerfile dockerfile: dev/docker-mysql/Dockerfile
depends_on: depends_on:
- db - db
- redis
networks: networks:
- wikinet - wikinet
ports: ports:
......
#!/bin/sh #!/bin/sh
echo "Waiting for redis and mysql to start up..." echo "Waiting for mysql to start up..."
bash ./dev/docker-common/wait.sh redis:6379
bash ./dev/docker-common/wait.sh db:3306 bash ./dev/docker-common/wait.sh db:3306
echo "=== READY ===" echo "=== READY ==="
tail -f /dev/null tail -f /dev/null
...@@ -7,8 +7,4 @@ db: ...@@ -7,8 +7,4 @@ db:
user: wikijs user: wikijs
pass: wikijsrocks pass: wikijsrocks
db: wiki db: wiki
redis:
host: redis
port: 6379
db: 0
logLevel: info logLevel: info
...@@ -3,16 +3,6 @@ ...@@ -3,16 +3,6 @@
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
db: db:
image: postgres:9-alpine image: postgres:9-alpine
environment: environment:
...@@ -43,7 +33,6 @@ services: ...@@ -43,7 +33,6 @@ services:
dockerfile: dev/docker-postgres/Dockerfile dockerfile: dev/docker-postgres/Dockerfile
depends_on: depends_on:
- db - db
- redis
networks: networks:
- wikinet - wikinet
ports: ports:
......
#!/bin/sh #!/bin/sh
echo "Waiting for redis and postgres to start up..." echo "Waiting for postgres to start up..."
bash ./dev/docker-common/wait.sh redis:6379
bash ./dev/docker-common/wait.sh db:5432 bash ./dev/docker-common/wait.sh db:5432
echo "=== READY ===" echo "=== READY ==="
tail -f /dev/null tail -f /dev/null
...@@ -3,8 +3,4 @@ bindIP: 0.0.0.0 ...@@ -3,8 +3,4 @@ bindIP: 0.0.0.0
db: db:
type: sqlite type: sqlite
storage: /wiki/db.sqlite storage: /wiki/db.sqlite
redis:
host: redis
port: 6379
db: 0
logLevel: info logLevel: info
...@@ -3,22 +3,10 @@ ...@@ -3,22 +3,10 @@
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
ports:
- "16379:6379"
logging:
driver: "none"
networks:
- wikinet
wiki: wiki:
build: build:
context: . context: .
dockerfile: dev/docker-sqlite/Dockerfile dockerfile: dev/docker-sqlite/Dockerfile
depends_on:
- redis
networks: networks:
- wikinet - wikinet
ports: ports:
...@@ -26,7 +14,6 @@ services: ...@@ -26,7 +14,6 @@ services:
volumes: volumes:
- .:/wiki - .:/wiki
- /wiki/node_modules - /wiki/node_modules
command: ["sh", "./dev/docker-sqlite/init.sh"]
networks: networks:
wikinet: wikinet:
......
#!/bin/sh
echo "Waiting for redis to start up..."
bash ./dev/docker-common/wait.sh redis:6379
echo "=== READY ==="
tail -f /dev/null
version: "3" version: "3"
services: services:
redis:
image: redis:4-alpine
logging:
driver: "none"
networks:
- wikinet
db: db:
image: postgres:9-alpine image: postgres:9-alpine
environment: environment:
......
const { Client } = require('pg')
const fs = require('fs')
const path = require('path')
const yaml = require('js-yaml')

/**
 * Dev helper: reset the PostgreSQL database by dropping and recreating
 * the `public` schema, using connection settings from dev/docker/config.yml.
 */
let config = {}
try {
  // FIX: the parsed YAML was previously assigned to an implicit global
  // `conf`, leaving `config` as an empty object — the pg Client below was
  // then constructed with all-undefined credentials. Assign to `config`.
  // NOTE(review): the original also called `cfgHelper.parseConfigValue(...)`
  // without ever requiring `cfgHelper`, which threw a ReferenceError and made
  // this script exit on every run. If config.yml contains $(VAR) placeholders,
  // restore that call with the proper require — TODO confirm against the repo.
  config = yaml.safeLoad(
    fs.readFileSync(path.join(process.cwd(), 'dev/docker/config.yml'), 'utf8')
  )
} catch (err) {
  console.error(err.message)
  process.exit(1)
}

const client = new Client({
  user: config.db.username,
  host: config.db.host,
  database: config.db.database,
  password: config.db.password,
  port: config.db.port
})

/**
 * Connect, wipe and recreate the public schema, then disconnect.
 */
async function main () {
  await client.connect()
  await client.query('DROP SCHEMA public CASCADE;')
  await client.query('CREATE SCHEMA public;')
  await client.end()
  console.info('Success.')
}

// FIX: the promise returned by main() was floating — connection/query
// failures were silently unhandled. Report them and exit non-zero.
main().catch(err => {
  console.error(err.message)
  process.exit(1)
})
...@@ -48,14 +48,11 @@ ...@@ -48,14 +48,11 @@
"bcryptjs-then": "1.0.1", "bcryptjs-then": "1.0.1",
"bluebird": "3.5.3", "bluebird": "3.5.3",
"body-parser": "1.18.3", "body-parser": "1.18.3",
"bull": "3.6.0",
"chalk": "2.4.2", "chalk": "2.4.2",
"cheerio": "1.0.0-rc.2", "cheerio": "1.0.0-rc.2",
"child-process-promise": "2.2.1",
"chokidar": "2.0.4", "chokidar": "2.0.4",
"clean-css": "4.2.1", "clean-css": "4.2.1",
"compression": "1.7.3", "compression": "1.7.3",
"connect-redis": "3.4.0",
"cookie-parser": "1.4.3", "cookie-parser": "1.4.3",
"cors": "2.8.5", "cors": "2.8.5",
"custom-error-instance": "2.1.1", "custom-error-instance": "2.1.1",
...@@ -63,14 +60,10 @@ ...@@ -63,14 +60,10 @@
"diff": "4.0.1", "diff": "4.0.1",
"diff2html": "2.7.0", "diff2html": "2.7.0",
"dotize": "^0.3.0", "dotize": "^0.3.0",
"execa": "1.0.0",
"express": "4.16.4", "express": "4.16.4",
"express-brute": "1.0.1", "express-brute": "1.0.1",
"express-brute-redis": "0.0.1",
"express-session": "1.15.6",
"file-type": "10.7.1", "file-type": "10.7.1",
"filesize": "4.0.0", "filesize": "4.0.0",
"follow-redirects": "1.6.1",
"fs-extra": "7.0.1", "fs-extra": "7.0.1",
"getos": "3.1.1", "getos": "3.1.1",
"graphql": "14.1.1", "graphql": "14.1.1",
...@@ -83,7 +76,6 @@ ...@@ -83,7 +76,6 @@
"i18next-localstorage-cache": "1.1.1", "i18next-localstorage-cache": "1.1.1",
"i18next-node-fs-backend": "2.1.1", "i18next-node-fs-backend": "2.1.1",
"image-size": "0.7.1", "image-size": "0.7.1",
"ioredis": "4.6.2",
"js-base64": "2.5.1", "js-base64": "2.5.1",
"js-binary": "1.2.0", "js-binary": "1.2.0",
"js-yaml": "3.12.1", "js-yaml": "3.12.1",
...@@ -117,7 +109,6 @@ ...@@ -117,7 +109,6 @@
"node-2fa": "1.1.2", "node-2fa": "1.1.2",
"node-cache": "4.2.0", "node-cache": "4.2.0",
"nodemailer": "5.1.1", "nodemailer": "5.1.1",
"oauth2orize": "1.11.0",
"objection": "1.5.3", "objection": "1.5.3",
"ora": "3.0.0", "ora": "3.0.0",
"passport": "0.4.0", "passport": "0.4.0",
...@@ -146,7 +137,6 @@ ...@@ -146,7 +137,6 @@
"pug": "2.0.3", "pug": "2.0.3",
"qr-image": "3.2.0", "qr-image": "3.2.0",
"raven": "2.6.4", "raven": "2.6.4",
"read-chunk": "3.0.0",
"remove-markdown": "0.3.0", "remove-markdown": "0.3.0",
"request": "2.88.0", "request": "2.88.0",
"request-promise": "4.2.2", "request-promise": "4.2.2",
......
...@@ -15,11 +15,6 @@ defaults: ...@@ -15,11 +15,6 @@ defaults:
pass: wikijsrocks pass: wikijsrocks
db: wiki db: wiki
storage: ./db.sqlite storage: ./db.sqlite
redis:
host: localhost
port: 6379
db: 0
password: null
ssl: ssl:
enabled: false enabled: false
bindIP: 0.0.0.0 bindIP: 0.0.0.0
...@@ -53,30 +48,15 @@ localeNamespaces: ...@@ -53,30 +48,15 @@ localeNamespaces:
- auth - auth
- common - common
jobs: jobs:
fetchGraphLocale:
onInit: false
cron: false
concurrency: 0
purgeUploads: purgeUploads:
onInit: true onInit: true
cron: '*/15 * * * *' schedule: PT15M
concurrency: 0
renderPage:
onInit: false
cron: false
concurrency: 1
syncGraphLocales: syncGraphLocales:
onInit: true onInit: true
cron: '0 0 * * *' schedule: P1D
concurrency: 0
syncGraphUpdates: syncGraphUpdates:
onInit: true onInit: true
cron: '0 0 * * *' schedule: P1D
concurrency: 0
syncStorage:
onInit: false
cron: false
concurrency: 1
groups: groups:
defaultPermissions: defaultPermissions:
- 'manage:pages' - 'manage:pages'
......
const NodeCache = require('node-cache')
module.exports = {
init() {
return new NodeCache()
}
}
const moment = require('moment')
const childProcess = require('child_process')

/* global WIKI */

/**
 * Scheduled Job
 *
 * Wraps a single named job that runs either in-process (requiring a module
 * from server/jobs/) or in a forked worker process, once or repeatedly on
 * a fixed interval.
 */
module.exports = class Job {
  /**
   * @param {Object} opts Job options
   * @param {String} opts.name Job name; resolves to `../jobs/<name>` (or the
   *   worker's `--job` argument when running forked)
   * @param {Boolean} [opts.immediate=false] Run right away on start() instead
   *   of waiting for the first interval
   * @param {String} [opts.schedule='P1D'] Interval between runs as an
   *   ISO 8601 duration (e.g. PT15M, P1D)
   * @param {Boolean} [opts.repeat=false] Re-queue another run after each run
   * @param {Boolean} [opts.worker=false] Run in a forked child process
   */
  constructor({
    name,
    immediate = false,
    schedule = 'P1D',
    repeat = false,
    worker = false
  }) {
    // Resolves when the latest invocation completes; starts resolved so
    // callers can always safely `await job.finished`.
    this.finished = Promise.resolve()
    this.name = name
    this.immediate = immediate
    // moment.duration parses the ISO 8601 duration string
    this.schedule = moment.duration(schedule)
    this.repeat = repeat
    this.worker = worker
  }
  /**
   * Start Job
   *
   * Either runs immediately or waits one schedule interval first.
   *
   * @param {Object} data Job Data
   */
  start(data) {
    if (this.immediate) {
      this.invoke(data)
    } else {
      this.queue(data)
    }
  }
  /**
   * Queue the next job run according to the wait duration
   *
   * @param {Object} data Job Data
   */
  queue(data) {
    // Keep the timer handle so stop() can cancel a pending run
    this.timeout = setTimeout(this.invoke.bind(this), this.schedule.asMilliseconds(), data)
  }
  /**
   * Run the actual job
   *
   * Errors are logged (not rethrown); a repeating job re-queues itself even
   * after a failed run.
   *
   * @param {Object} data Job Data
   */
  async invoke(data) {
    try {
      if (this.worker) {
        // Forked mode: worker.js loads the job module named by --job.
        // NOTE(review): `data` is interpolated into the argv string — assumes
        // it stringifies meaningfully (e.g. a primitive); confirm for object payloads.
        const proc = childProcess.fork(`server/core/worker.js`, [
          `--job=${this.name}`,
          `--data=${data}`
        ], {
          cwd: WIKI.ROOTPATH
        })
        this.finished = new Promise((resolve, reject) => {
          proc.on('exit', (code, signal) => {
            if (code === 0) {
              resolve()
            } else {
              // NOTE(review): rejects with the kill signal (possibly null),
              // not an Error — awaiting callers see a non-Error rejection.
              reject(signal)
            }
            proc.kill()
          })
        })
      } else {
        // In-process mode: the job module returns a promise
        this.finished = require(`../jobs/${this.name}`)(data)
      }
      await this.finished
    } catch (err) {
      WIKI.logger.warn(err)
    }
    if (this.repeat) {
      this.queue(data)
    }
  }
  /**
   * Stop any future job invocation from occurring
   */
  stop() {
    clearTimeout(this.timeout)
  }
}
...@@ -10,10 +10,10 @@ module.exports = { ...@@ -10,10 +10,10 @@ module.exports = {
WIKI.logger.info('=======================================') WIKI.logger.info('=======================================')
WIKI.models = require('./db').init() WIKI.models = require('./db').init()
WIKI.redis = require('./redis').init()
WIKI.queue = require('./queue').init()
await this.preBootMaster() await WIKI.models.onReady
await WIKI.configSvc.loadFromDb()
this.bootMaster() this.bootMaster()
}, },
/** /**
...@@ -21,11 +21,10 @@ module.exports = { ...@@ -21,11 +21,10 @@ module.exports = {
*/ */
async preBootMaster() { async preBootMaster() {
try { try {
await WIKI.models.onReady await this.initTelemetry()
await WIKI.configSvc.loadFromDb() WIKI.cache = require('./cache').init()
await WIKI.queue.clean() WIKI.scheduler = require('./scheduler').init()
WIKI.events = new EventEmitter() WIKI.events = new EventEmitter()
WIKI.redisSub = require('./redis').subscribe()
} catch (err) { } catch (err) {
WIKI.logger.error(err) WIKI.logger.error(err)
process.exit(1) process.exit(1)
...@@ -40,7 +39,7 @@ module.exports = { ...@@ -40,7 +39,7 @@ module.exports = {
WIKI.logger.info('Starting setup wizard...') WIKI.logger.info('Starting setup wizard...')
require('../setup')() require('../setup')()
} else { } else {
await this.initTelemetry() await this.preBootMaster()
await require('../master')() await require('../master')()
this.postBootMaster() this.postBootMaster()
} }
...@@ -62,7 +61,7 @@ module.exports = { ...@@ -62,7 +61,7 @@ module.exports = {
await WIKI.auth.activateStrategies() await WIKI.auth.activateStrategies()
await WIKI.models.storage.initTargets() await WIKI.models.storage.initTargets()
await WIKI.queue.start() WIKI.scheduler.start()
}, },
/** /**
* Init Telemetry * Init Telemetry
......
...@@ -27,15 +27,6 @@ module.exports = { ...@@ -27,15 +27,6 @@ module.exports = {
// Load current language + namespaces // Load current language + namespaces
this.refreshNamespaces(true) this.refreshNamespaces(true)
// Listen for localization events
WIKI.events.on('localization', (action) => {
switch (action) {
case 'reload':
this.refreshNamespaces()
break
}
})
return this return this
}, },
/** /**
......
const path = require('path')
const Bull = require('bull')
const Promise = require('bluebird')
const _ = require('lodash')

/* global WIKI */

/**
 * Job Queue (legacy Bull / Redis backed implementation)
 *
 * Creates one Bull queue per job declared in WIKI.data.jobs and wires each
 * queue to its processor module under server/jobs/.
 */
module.exports = {
  // Map of queueName -> Bull queue instance
  job: {},
  /**
   * Instantiate all queues and attach their processors.
   *
   * @returns {Object} Queue module instance (for chaining)
   */
  init() {
    _.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
      this.job[queueName] = new Bull(queueName, {
        prefix: `queue`,
        redis: WIKI.config.redis
      })
      // concurrency > 0 caps parallel processors; otherwise Bull's default applies
      if (queueParams.concurrency > 0) {
        this.job[queueName].process(queueParams.concurrency, path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`))
      } else {
        this.job[queueName].process(path.join(WIKI.SERVERPATH, `jobs/${_.kebabCase(queueName)}.js`))
      }
    })
    return this
  },
  /**
   * Enqueue the initial and recurring runs for every queue, per its config.
   */
  start() {
    _.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
      // Run once at startup when onInit is set
      if (queueParams.onInit) {
        this.job[queueName].add({}, {
          removeOnComplete: true
        })
      }
      // Schedule recurring runs when a cron expression is provided
      if (queueParams.cron) {
        this.job[queueName].add({}, {
          repeat: { cron: queueParams.cron },
          removeOnComplete: true
        })
      }
    })
  },
  /**
   * Gracefully close all queues (e.g. on shutdown).
   */
  async quit() {
    for (const queueName in this.job) {
      await this.job[queueName].close()
    }
  },
  /**
   * Delete all persisted queue keys from Redis, one queue at a time.
   *
   * @returns {Promise<Boolean>} Resolves true on success; errors are logged,
   *   not rethrown.
   */
  async clean() {
    return Promise.each(_.keys(WIKI.data.jobs), queueName => {
      return new Promise((resolve, reject) => {
        let keyStream = WIKI.redis.scanStream({
          match: `queue:${queueName}:*`
        })
        keyStream.on('data', resultKeys => {
          // SCAN may yield an empty batch; issue DEL only when keys were found
          if (resultKeys.length > 0) {
            WIKI.redis.del(resultKeys)
          }
        })
        keyStream.on('end', resolve)
      })
    }).then(() => {
      WIKI.logger.info('Purging old queue jobs: [ OK ]')
    }).return(true).catch(err => {
      WIKI.logger.error(err)
    })
  }
}
const Redis = require('ioredis')
const { isPlainObject } = require('lodash')
/* global WIKI */
module.exports = {
init() {
if (isPlainObject(WIKI.config.redis)) {
let red = new Redis(WIKI.config.redis)
red.on('ready', () => {
WIKI.logger.info('Redis connection: [ OK ]')
})
red.on('error', () => {
WIKI.logger.error('Failed to connect to Redis instance!')
process.exit(1)
})
return red
} else {
WIKI.logger.error('Invalid Redis configuration!')
process.exit(1)
}
},
subscribe() {
let red = this.init()
red.on('message', (channel, msg) => {
WIKI.events.emit(channel, msg)
})
red.subscribe('localization', 'updates', (err, count) => {
if (err) {
WIKI.logger.error(err)
process.exit(1)
}
})
return red
}
}
const Job = require('./job')
const _ = require('lodash')
/* global WIKI */
module.exports = {
jobs: [],
init() {
return this
},
start() {
_.forOwn(WIKI.data.jobs, (queueParams, queueName) => {
this.registerJob({
name: _.kebabCase(queueName),
immediate: queueParams.onInit,
schedule: queueParams.schedule,
repeat: true
})
})
},
registerJob(opts, data) {
const job = new Job(opts)
job.start(data)
if (job.repeat) {
this.jobs.push(job)
}
return job
},
stop() {
this.jobs.forEach(job => {
job.stop()
})
}
}
...@@ -15,15 +15,6 @@ module.exports = { ...@@ -15,15 +15,6 @@ module.exports = {
minimumNodeRequired: '10.12.0' minimumNodeRequired: '10.12.0'
}, },
init() { init() {
// Listen for updates events
WIKI.events.on('updates', (infoRaw) => {
try {
this.updates = JSON.parse(infoRaw)
} catch (err) {
WIKI.logger.warn('Failed to parse updates info.')
}
})
// Clear content cache // Clear content cache
fs.emptyDir(path.join(WIKI.ROOTPATH, 'data/cache')) fs.emptyDir(path.join(WIKI.ROOTPATH, 'data/cache'))
......
...@@ -10,9 +10,10 @@ let WIKI = { ...@@ -10,9 +10,10 @@ let WIKI = {
global.WIKI = WIKI global.WIKI = WIKI
WIKI.configSvc.init() WIKI.configSvc.init()
// ----------------------------------------
// Init Logger
// ----------------------------------------
WIKI.logger = require('./logger').init('JOB') WIKI.logger = require('./logger').init('JOB')
const args = require('yargs').argv

// Run the requested job module exactly once, then terminate this worker.
// Invoked as a forked child process:
//   node server/core/worker.js --job=<name> --data=<payload>
// where <name> resolves to a module under server/jobs/.
;(async () => {
  await require(`../jobs/${args.job}`)(args.data)
  process.exit(0)
})()
...@@ -11,7 +11,7 @@ module.exports = { ...@@ -11,7 +11,7 @@ module.exports = {
*/ */
async getMigrations() { async getMigrations() {
const absoluteDir = path.join(WIKI.SERVERPATH, 'db/migrations') const absoluteDir = path.join(WIKI.SERVERPATH, 'db/migrations')
const migrationFiles = await fs.readdirAsync(absoluteDir) const migrationFiles = await fs.readdir(absoluteDir)
return migrationFiles.sort(semver.compare).map(m => ({ return migrationFiles.sort(semver.compare).map(m => ({
file: m, file: m,
directory: absoluteDir directory: absoluteDir
......
...@@ -12,9 +12,9 @@ module.exports = { ...@@ -12,9 +12,9 @@ module.exports = {
}, },
LocalizationQuery: { LocalizationQuery: {
async locales(obj, args, context, info) { async locales(obj, args, context, info) {
let remoteLocales = await WIKI.redis.get('locales') let remoteLocales = await WIKI.cache.get('locales')
let localLocales = await WIKI.models.locales.query().select('code', 'isRTL', 'name', 'nativeName', 'createdAt', 'updatedAt') let localLocales = await WIKI.models.locales.query().select('code', 'isRTL', 'name', 'nativeName', 'createdAt', 'updatedAt')
remoteLocales = (remoteLocales) ? JSON.parse(remoteLocales) : localLocales remoteLocales = (remoteLocales) ? remoteLocales : localLocales
return _.map(remoteLocales, rl => { return _.map(remoteLocales, rl => {
let isInstalled = _.some(localLocales, ['code', rl.code]) let isInstalled = _.some(localLocales, ['code', rl.code])
return { return {
...@@ -39,12 +39,11 @@ module.exports = { ...@@ -39,12 +39,11 @@ module.exports = {
LocalizationMutation: { LocalizationMutation: {
async downloadLocale(obj, args, context) { async downloadLocale(obj, args, context) {
try { try {
const job = await WIKI.queue.job.fetchGraphLocale.add({ const job = await WIKI.scheduler.registerJob({
locale: args.locale name: 'fetch-graph-locale',
}, { immediate: true
timeout: 30000 }, args.locale)
}) await job.finished
await job.finished()
return { return {
responseResult: graphHelper.generateSuccess('Locale downloaded successfully') responseResult: graphHelper.generateSuccess('Locale downloaded successfully')
} }
......
...@@ -98,18 +98,6 @@ module.exports = { ...@@ -98,18 +98,6 @@ module.exports = {
nodeVersion() { nodeVersion() {
return process.version.substr(1) return process.version.substr(1)
}, },
redisVersion() {
return WIKI.redis.serverInfo.redis_version
},
redisUsedRAM() {
return WIKI.redis.serverInfo.used_memory_human
},
redisTotalRAM() {
return _.get(WIKI.redis.serverInfo, 'total_system_memory_human', 'N/A')
},
redisHost() {
return WIKI.redis.options.host
},
async groupsTotal() { async groupsTotal() {
const total = await WIKI.models.groups.query().count('* as total').first().pluck('total') const total = await WIKI.models.groups.query().count('* as total').first().pluck('total')
return _.toSafeInteger(total) return _.toSafeInteger(total)
......
...@@ -38,10 +38,6 @@ type SystemInfo { ...@@ -38,10 +38,6 @@ type SystemInfo {
pagesTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:pages", "delete:pages"]) pagesTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:pages", "delete:pages"])
platform: String @auth(requires: ["manage:system"]) platform: String @auth(requires: ["manage:system"])
ramTotal: String @auth(requires: ["manage:system"]) ramTotal: String @auth(requires: ["manage:system"])
redisHost: String @auth(requires: ["manage:system"])
redisTotalRAM: String @auth(requires: ["manage:system"])
redisUsedRAM: String @auth(requires: ["manage:system"])
redisVersion: String @auth(requires: ["manage:system"])
usersTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:groups", "write:groups", "manage:users", "write:users"]) usersTotal: Int @auth(requires: ["manage:system", "manage:navigation", "manage:groups", "write:groups", "manage:users", "write:users"])
workingDirectory: String @auth(requires: ["manage:system"]) workingDirectory: String @auth(requires: ["manage:system"])
} }
require('../core/worker')
const _ = require('lodash') const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch') const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */ /* global WIKI */
WIKI.redis = require('../core/redis').init() module.exports = async (localeCode) => {
WIKI.models = require('../core/db').init() WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint...`)
module.exports = async (job) => {
WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint...`)
try { try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({ const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint uri: WIKI.config.graphEndpoint
}) })
...@@ -26,7 +21,7 @@ module.exports = async (job) => { ...@@ -26,7 +21,7 @@ module.exports = async (job) => {
} }
}`, }`,
variables: { variables: {
code: job.data.locale code: localeCode
} }
}) })
const strings = _.get(respStrings, 'data.localization.strings', []) const strings = _.get(respStrings, 'data.localization.strings', [])
...@@ -36,12 +31,12 @@ module.exports = async (job) => { ...@@ -36,12 +31,12 @@ module.exports = async (job) => {
_.set(lcObj, row.key.replace(':', '.'), row.value) _.set(lcObj, row.key.replace(':', '.'), row.value)
}) })
const locales = await WIKI.redis.get('locales') const locales = await WIKI.cache.get('locales')
if (locales) { if (locales) {
const currentLocale = _.find(JSON.parse(locales), ['code', job.data.locale]) || {} const currentLocale = _.find(locales, ['code', localeCode]) || {}
await WIKI.models.locales.query().delete().where('code', job.data.locale) await WIKI.models.locales.query().delete().where('code', localeCode)
await WIKI.models.locales.query().insert({ await WIKI.models.locales.query().insert({
code: job.data.locale, code: localeCode,
strings: lcObj, strings: lcObj,
isRTL: currentLocale.isRTL, isRTL: currentLocale.isRTL,
name: currentLocale.name, name: currentLocale.name,
...@@ -51,11 +46,11 @@ module.exports = async (job) => { ...@@ -51,11 +46,11 @@ module.exports = async (job) => {
throw new Error('Failed to fetch cached locales list! Restart server to resolve this issue.') throw new Error('Failed to fetch cached locales list! Restart server to resolve this issue.')
} }
await WIKI.redis.publish('localization', 'reload') await WIKI.lang.refreshNamespaces()
WIKI.logger.info(`Fetching locale ${job.data.locale} from Graph endpoint: [ COMPLETED ]`) WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint: [ COMPLETED ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Fetching locale ${job.data.locale} from Graph endpoint: [ FAILED ]`) WIKI.logger.error(`Fetching locale ${localeCode} from Graph endpoint: [ FAILED ]`)
WIKI.logger.error(err.message) WIKI.logger.error(err.message)
} }
} }
require('../core/worker')
/* global WIKI */ /* global WIKI */
const Promise = require('bluebird') const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs-extra')) const fs = require('fs-extra')
const moment = require('moment') const moment = require('moment')
const path = require('path') const path = require('path')
module.exports = async (job) => { module.exports = async () => {
WIKI.logger.info('Purging orphaned upload files...') WIKI.logger.info('Purging orphaned upload files...')
try { try {
const uplTempPath = path.resolve(process.cwd(), WIKI.config.paths.data, 'uploads') const uplTempPath = path.resolve(process.cwd(), WIKI.config.paths.data, 'uploads')
const ls = await fs.readdirAsync(uplTempPath) const ls = await fs.readdir(uplTempPath)
const fifteenAgo = moment().subtract(15, 'minutes') const fifteenAgo = moment().subtract(15, 'minutes')
await Promise.map(ls, (f) => { await Promise.map(ls, (f) => {
return fs.statAsync(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } }) return fs.stat(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } })
}).filter((s) => { return s.stat.isFile() }).then((arrFiles) => { }).filter((s) => { return s.stat.isFile() }).then((arrFiles) => {
return Promise.map(arrFiles, (f) => { return Promise.map(arrFiles, (f) => {
if (moment(f.stat.ctime).isBefore(fifteenAgo, 'minute')) { if (moment(f.stat.ctime).isBefore(fifteenAgo, 'minute')) {
return fs.unlinkAsync(path.join(uplTempPath, f.filename)) return fs.unlink(path.join(uplTempPath, f.filename))
} }
}) })
}) })
......
require('../core/worker')
const _ = require('lodash') const _ = require('lodash')
const cheerio = require('cheerio') const cheerio = require('cheerio')
/* global WIKI */ /* global WIKI */
WIKI.models = require('../core/db').init() module.exports = async (pageId) => {
WIKI.logger.info(`Rendering page ID ${pageId}...`)
module.exports = async (job) => {
WIKI.logger.info(`Rendering page ${job.data.page.path}...`)
try { try {
let output = job.data.page.content WIKI.models = require('../core/db').init()
for (let core of job.data.pipeline) {
const page = await WIKI.models.pages.getPageFromDb(pageId)
if (!page) {
throw new Error('Invalid Page Id')
}
await WIKI.models.renderers.fetchDefinitions()
const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType)
let output = page.content
for (let core of pipeline) {
const renderer = require(`../modules/rendering/${_.kebabCase(core.key)}/renderer.js`) const renderer = require(`../modules/rendering/${_.kebabCase(core.key)}/renderer.js`)
output = await renderer.render.call({ output = await renderer.render.call({
config: core.config, config: core.config,
children: core.children, children: core.children,
page: job.data.page, page: page,
input: output input: output
}) })
} }
...@@ -61,18 +67,20 @@ module.exports = async (job) => { ...@@ -61,18 +67,20 @@ module.exports = async (job) => {
render: output, render: output,
toc: JSON.stringify(toc.root) toc: JSON.stringify(toc.root)
}) })
.where('id', job.data.page.id) .where('id', pageId)
// Save to cache // Save to cache
await WIKI.models.pages.savePageToCache({ await WIKI.models.pages.savePageToCache({
...job.data.page, ...page,
render: output, render: output,
toc: JSON.stringify(toc.root) toc: JSON.stringify(toc.root)
}) })
WIKI.logger.info(`Rendering page ${job.data.page.path}: [ COMPLETED ]`) await WIKI.models.knex.destroy()
WIKI.logger.info(`Rendering page ID ${pageId}: [ COMPLETED ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Rendering page ${job.data.page.path}: [ FAILED ]`) WIKI.logger.error(`Rendering page ID ${pageId}: [ FAILED ]`)
WIKI.logger.error(err.message) WIKI.logger.error(err.message)
} }
} }
require('../core/worker')
const _ = require('lodash') const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch') const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */ /* global WIKI */
WIKI.redis = require('../core/redis').init() module.exports = async () => {
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
WIKI.logger.info('Syncing locales with Graph endpoint...') WIKI.logger.info('Syncing locales with Graph endpoint...')
try { try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({ const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint uri: WIKI.config.graphEndpoint
}) })
...@@ -33,7 +28,7 @@ module.exports = async (job) => { ...@@ -33,7 +28,7 @@ module.exports = async (job) => {
}` }`
}) })
const locales = _.sortBy(_.get(respList, 'data.localization.locales', []), 'name').map(lc => ({...lc, isInstalled: (lc.code === 'en')})) const locales = _.sortBy(_.get(respList, 'data.localization.locales', []), 'name').map(lc => ({...lc, isInstalled: (lc.code === 'en')}))
WIKI.redis.set('locales', JSON.stringify(locales)) WIKI.cache.set('locales', locales)
const currentLocale = _.find(locales, ['code', WIKI.config.lang.code]) const currentLocale = _.find(locales, ['code', WIKI.config.lang.code])
// -> Download locale strings // -> Download locale strings
...@@ -68,7 +63,7 @@ module.exports = async (job) => { ...@@ -68,7 +63,7 @@ module.exports = async (job) => {
}).where('code', WIKI.config.lang.code) }).where('code', WIKI.config.lang.code)
} }
await WIKI.redis.publish('localization', 'reload') await WIKI.lang.refreshNamespaces()
WIKI.logger.info('Syncing locales with Graph endpoint: [ COMPLETED ]') WIKI.logger.info('Syncing locales with Graph endpoint: [ COMPLETED ]')
} catch (err) { } catch (err) {
......
require('../core/worker')
const _ = require('lodash') const _ = require('lodash')
const { createApolloFetch } = require('apollo-fetch') const { createApolloFetch } = require('apollo-fetch')
/* global WIKI */ /* global WIKI */
WIKI.redis = require('../core/redis').init() module.exports = async () => {
WIKI.models = require('../core/db').init()
module.exports = async (job) => {
WIKI.logger.info(`Fetching latest updates from Graph endpoint...`) WIKI.logger.info(`Fetching latest updates from Graph endpoint...`)
try { try {
await WIKI.configSvc.loadFromDb()
const apollo = createApolloFetch({ const apollo = createApolloFetch({
uri: WIKI.config.graphEndpoint uri: WIKI.config.graphEndpoint
}) })
...@@ -33,9 +28,10 @@ module.exports = async (job) => { ...@@ -33,9 +28,10 @@ module.exports = async (job) => {
version: WIKI.version version: WIKI.version
} }
}) })
const info = _.get(resp, 'data.releases.checkForUpdates', {}) const info = _.get(resp, 'data.releases.checkForUpdates', false)
if (info) {
await WIKI.redis.publish('updates', JSON.stringify(info)) WIKI.system.updates = info
}
WIKI.logger.info(`Fetching latest updates from Graph endpoint: [ COMPLETED ]`) WIKI.logger.info(`Fetching latest updates from Graph endpoint: [ COMPLETED ]`)
} catch (err) { } catch (err) {
......
...@@ -46,7 +46,9 @@ module.exports = async () => { ...@@ -46,7 +46,9 @@ module.exports = async () => {
app.use(mw.security) app.use(mw.security)
app.use(cors(WIKI.config.cors)) app.use(cors(WIKI.config.cors))
app.options('*', cors(WIKI.config.cors)) app.options('*', cors(WIKI.config.cors))
if (WIKI.config.trustProxy) {
app.enable('trust proxy') app.enable('trust proxy')
}
// ---------------------------------------- // ----------------------------------------
// Public Assets // Public Assets
......
...@@ -23,15 +23,15 @@ module.exports = class Navigation extends Model { ...@@ -23,15 +23,15 @@ module.exports = class Navigation extends Model {
static async getTree({ cache = false } = {}) { static async getTree({ cache = false } = {}) {
if (cache) { if (cache) {
const navTreeCached = await WIKI.redis.get('nav:sidebar') const navTreeCached = await WIKI.cache.get('nav:sidebar')
if (navTreeCached) { if (navTreeCached) {
return JSON.parse(navTreeCached) return navTreeCached
} }
} }
const navTree = await WIKI.models.navigation.query().findOne('key', 'site') const navTree = await WIKI.models.navigation.query().findOne('key', 'site')
if (navTree) { if (navTree) {
if (cache) { if (cache) {
await WIKI.redis.set('nav:sidebar', JSON.stringify(navTree.config), 'EX', 300) await WIKI.cache.set('nav:sidebar', navTree.config, 300)
} }
return navTree.config return navTree.config
} else { } else {
......
...@@ -217,15 +217,12 @@ module.exports = class Page extends Model { ...@@ -217,15 +217,12 @@ module.exports = class Page extends Model {
} }
static async renderPage(page) { static async renderPage(page) {
const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType) const renderJob = await WIKI.scheduler.registerJob({
const renderJob = await WIKI.queue.job.renderPage.add({ name: 'render-page',
page, immediate: true,
pipeline worker: true
}, { }, page.id)
removeOnComplete: true, return renderJob.finished
removeOnFail: true
})
return renderJob.finished()
} }
static async getPage(opts) { static async getPage(opts) {
...@@ -240,6 +237,7 @@ module.exports = class Page extends Model { ...@@ -240,6 +237,7 @@ module.exports = class Page extends Model {
} }
static async getPageFromDb(opts) { static async getPageFromDb(opts) {
const queryModeID = _.isNumber(opts)
return WIKI.models.pages.query() return WIKI.models.pages.query()
.column([ .column([
'pages.*', 'pages.*',
...@@ -252,11 +250,14 @@ module.exports = class Page extends Model { ...@@ -252,11 +250,14 @@ module.exports = class Page extends Model {
]) ])
.joinRelation('author') .joinRelation('author')
.joinRelation('creator') .joinRelation('creator')
.where({ .where(queryModeID ? {
'pages.id': opts
} : {
'pages.path': opts.path, 'pages.path': opts.path,
'pages.localeCode': opts.locale 'pages.localeCode': opts.locale
}) })
.andWhere(builder => { .andWhere(builder => {
if (queryModeID) return
builder.where({ builder.where({
'pages.isPublished': true 'pages.isPublished': true
}).orWhere({ }).orWhere({
...@@ -265,6 +266,7 @@ module.exports = class Page extends Model { ...@@ -265,6 +266,7 @@ module.exports = class Page extends Model {
}) })
}) })
.andWhere(builder => { .andWhere(builder => {
if (queryModeID) return
if (opts.isPrivate) { if (opts.isPrivate) {
builder.where({ 'pages.isPrivate': true, 'pages.privateNS': opts.privateNS }) builder.where({ 'pages.isPrivate': true, 'pages.privateNS': opts.privateNS })
} else { } else {
......
...@@ -35,12 +35,7 @@ module.exports = class Renderer extends Model { ...@@ -35,12 +35,7 @@ module.exports = class Renderer extends Model {
return WIKI.models.renderers.query() return WIKI.models.renderers.query()
} }
static async refreshRenderersFromDisk() { static async fetchDefinitions() {
let trx
try {
const dbRenderers = await WIKI.models.renderers.query()
// -> Fetch definitions from disk
const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering')) const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
let diskRenderers = [] let diskRenderers = []
for (let dir of rendererDirs) { for (let dir of rendererDirs) {
...@@ -51,6 +46,15 @@ module.exports = class Renderer extends Model { ...@@ -51,6 +46,15 @@ module.exports = class Renderer extends Model {
...renderer, ...renderer,
props: commonHelper.parseModuleProps(renderer.props) props: commonHelper.parseModuleProps(renderer.props)
})) }))
}
static async refreshRenderersFromDisk() {
let trx
try {
const dbRenderers = await WIKI.models.renderers.query()
// -> Fetch definitions from disk
await WIKI.models.renderers.fetchDefinitions()
// -> Insert new Renderers // -> Insert new Renderers
let newRenderers = [] let newRenderers = []
......
...@@ -107,6 +107,11 @@ module.exports = class Storage extends Model { ...@@ -107,6 +107,11 @@ module.exports = class Storage extends Model {
target.fn.config = target.config target.fn.config = target.config
target.fn.mode = target.mode target.fn.mode = target.mode
await target.fn.init() await target.fn.init()
// if (target.schedule) {
// WIKI.scheduler.registerJob({
// name:
// }, target.fn.sync)
// }
} }
} catch (err) { } catch (err) {
WIKI.logger.warn(err) WIKI.logger.warn(err)
......
...@@ -294,7 +294,7 @@ module.exports = class User extends Model { ...@@ -294,7 +294,7 @@ module.exports = class User extends Model {
static async loginTFA(opts, context) { static async loginTFA(opts, context) {
if (opts.securityCode.length === 6 && opts.loginToken.length === 64) { if (opts.securityCode.length === 6 && opts.loginToken.length === 64) {
let result = await WIKI.redis.get(`tfa:${opts.loginToken}`) let result = null // await WIKI.redis.get(`tfa:${opts.loginToken}`)
if (result) { if (result) {
let userId = _.toSafeInteger(result) let userId = _.toSafeInteger(result)
if (userId && userId > 0) { if (userId && userId > 0) {
......
...@@ -8,6 +8,7 @@ isAvailable: true ...@@ -8,6 +8,7 @@ isAvailable: true
supportedModes: supportedModes:
- push - push
defaultMode: push defaultMode: push
schedule: false
props: props:
path: path:
type: String type: String
......
...@@ -10,6 +10,7 @@ supportedModes: ...@@ -10,6 +10,7 @@ supportedModes:
- push - push
- pull - pull
defaultMode: sync defaultMode: sync
schedule: PT5M
props: props:
authType: authType:
type: String type: String
......
...@@ -117,13 +117,12 @@ const init = { ...@@ -117,13 +117,12 @@ const init = {
} }
}, },
async reload() { async reload() {
console.warn(chalk.yellow('--- Stopping scheduled jobs...'))
if (global.WIKI.scheduler) {
global.WIKI.scheduler.stop()
}
console.warn(chalk.yellow('--- Closing DB connections...')) console.warn(chalk.yellow('--- Closing DB connections...'))
await global.WIKI.models.knex.destroy() await global.WIKI.models.knex.destroy()
console.warn(chalk.yellow('--- Closing Redis connections...'))
await global.WIKI.redis.quit()
await global.WIKI.redisSub.quit()
console.warn(chalk.yellow('--- Closing Queue connections...'))
await global.WIKI.queue.quit()
console.warn(chalk.yellow('--- Closing Server connections...')) console.warn(chalk.yellow('--- Closing Server connections...'))
global.WIKI.server.destroy(() => { global.WIKI.server.destroy(() => {
console.warn(chalk.yellow('--- Purging node modules cache...')) console.warn(chalk.yellow('--- Purging node modules cache...'))
......
This diff was suppressed by a .gitattributes entry.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment