Mirror of https://github.com/hedgedoc/hedgedoc.git, synced 2025-05-14 15:14:56 -04:00

Linter: Fix all lint errors

Signed-off-by: Philip Molares <philip.molares@udo.edu>

commit 136d895d15 (parent b0a45bdf9c)
51 changed files with 2245 additions and 1539 deletions
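
The pattern applied throughout the diff below is the usual fix for `no-var`/`prefer-const`-style lint rules: bindings that are never reassigned become `const`, bindings that are reassigned become `let`. A minimal illustrative sketch of that conversion (not taken from the changed file):

    // before: `var` is function-scoped and hoisted
    //   var config = require('../config')
    //   var title = ''

    // after: block-scoped bindings
    const config = { docsPath: './docs' }   // stand-in object, never reassigned -> const
    let title = ''                          // reassigned below                  -> let
    if (!title) title = 'Untitled'
    console.log(config.docsPath, title)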
@@ -1,32 +1,32 @@
 'use strict'
 // external modules
-var fs = require('fs')
-var path = require('path')
-var LZString = require('lz-string')
-var base64url = require('base64url')
-var md = require('markdown-it')()
-var metaMarked = require('meta-marked')
-var cheerio = require('cheerio')
-var shortId = require('shortid')
-var Sequelize = require('sequelize')
-var async = require('async')
-var moment = require('moment')
-var DiffMatchPatch = require('diff-match-patch')
-var dmp = new DiffMatchPatch()
-var S = require('string')
+const fs = require('fs')
+const path = require('path')
+const LZString = require('lz-string')
+const base64url = require('base64url')
+const md = require('markdown-it')()
+const metaMarked = require('meta-marked')
+const cheerio = require('cheerio')
+const shortId = require('shortid')
+const Sequelize = require('sequelize')
+const async = require('async')
+const moment = require('moment')
+const DiffMatchPatch = require('diff-match-patch')
+const dmp = new DiffMatchPatch()
+const S = require('string')

 // core
-var config = require('../config')
-var logger = require('../logger')
+const config = require('../config')
+const logger = require('../logger')

 // ot
-var ot = require('../ot')
+const ot = require('../ot')

 // permission types
-var permissionTypes = ['freely', 'editable', 'limited', 'locked', 'protected', 'private']
+const permissionTypes = ['freely', 'editable', 'limited', 'locked', 'protected', 'private']

 module.exports = function (sequelize, DataTypes) {
-var Note = sequelize.define('Note', {
+const Note = sequelize.define('Note', {
 id: {
 type: DataTypes.UUID,
 primaryKey: true,
@@ -91,7 +91,7 @@ module.exports = function (sequelize, DataTypes) {
 return new Promise(function (resolve, reject) {
 // if no content specified then use default note
 if (!note.content) {
-var body = null
+let body = null
 let filePath = null
 if (note.alias) {
 filePath = path.join(config.docsPath, note.alias + '.md')
@@ -100,7 +100,7 @@ module.exports = function (sequelize, DataTypes) {
 filePath = config.defaultNotePath
 }
 if (Note.checkFileExist(filePath)) {
-var fsCreatedTime = moment(fs.statSync(filePath).ctime)
+const fsCreatedTime = moment(fs.statSync(filePath).ctime)
 body = fs.readFileSync(filePath, 'utf8')
 note.title = Note.parseNoteTitle(body)
 note.content = body
@@ -165,15 +165,15 @@ module.exports = function (sequelize, DataTypes) {
 }
 Note.encodeNoteId = function (id) {
 // remove dashes in UUID and encode in url-safe base64
-let str = id.replace(/-/g, '')
-let hexStr = Buffer.from(str, 'hex')
+const str = id.replace(/-/g, '')
+const hexStr = Buffer.from(str, 'hex')
 return base64url.encode(hexStr)
 }
 Note.decodeNoteId = function (encodedId) {
 // decode from url-safe base64
-let id = base64url.toBuffer(encodedId).toString('hex')
+const id = base64url.toBuffer(encodedId).toString('hex')
 // add dashes between the UUID string parts
-let idParts = []
+const idParts = []
 idParts.push(id.substr(0, 8))
 idParts.push(id.substr(8, 4))
 idParts.push(id.substr(12, 4))
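
encodeNoteId/decodeNoteId above turn a UUID into a short url-safe id by stripping the dashes, treating the remaining hex as raw bytes and base64url-encoding them, then reversing the process. A standalone sketch of the same round trip (the UUID value is just an example):

    const base64url = require('base64url')

    const uuid = '2c2e4d7d-7a3b-4a4e-9d5c-1a2b3c4d5e6f'

    // encode: 32 hex chars -> 16 bytes -> url-safe base64
    const short = base64url.encode(Buffer.from(uuid.replace(/-/g, ''), 'hex'))

    // decode: back to hex, then re-insert dashes in the 8-4-4-4-12 layout
    const hex = base64url.toBuffer(short).toString('hex')
    const restored = [
      hex.substr(0, 8), hex.substr(8, 4), hex.substr(12, 4),
      hex.substr(16, 4), hex.substr(20, 12)
    ].join('-')

    console.log(short, restored === uuid)   // short id and `true`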
@@ -182,8 +182,8 @@ module.exports = function (sequelize, DataTypes) {
 return idParts.join('-')
 }
 Note.checkNoteIdValid = function (id) {
-var uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
-var result = id.match(uuidRegex)
+const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
+const result = id.match(uuidRegex)
 if (result && result.length === 1) { return true } else { return false }
 }
 Note.parseNoteId = function (noteId, callback) {
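
Since the regex in checkNoteIdValid is anchored with `^`/`$` and has no `g` flag, the match-and-length check behaves like a plain `.test()`; for instance:

    const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i

    console.log(uuidRegex.test('2c2e4d7d-7a3b-4a4e-9d5c-1a2b3c4d5e6f'))   // true
    console.log(uuidRegex.test('not-a-uuid'))                             // false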
@@ -196,15 +196,15 @@ module.exports = function (sequelize, DataTypes) {
 }
 }).then(function (note) {
 if (note) {
-let filePath = path.join(config.docsPath, noteId + '.md')
+const filePath = path.join(config.docsPath, noteId + '.md')
 if (Note.checkFileExist(filePath)) {
 // if doc in filesystem have newer modified time than last change time
 // then will update the doc in db
-var fsModifiedTime = moment(fs.statSync(filePath).mtime)
-var dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
-var body = fs.readFileSync(filePath, 'utf8')
-var contentLength = body.length
-var title = Note.parseNoteTitle(body)
+const fsModifiedTime = moment(fs.statSync(filePath).mtime)
+const dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
+const body = fs.readFileSync(filePath, 'utf8')
+const contentLength = body.length
+const title = Note.parseNoteTitle(body)
 if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
 note.update({
 title: title,
@@ -214,9 +214,9 @@ module.exports = function (sequelize, DataTypes) {
 sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
 if (err) return _callback(err, null)
 // update authorship on after making revision of docs
-var patch = dmp.patch_fromText(revision.patch)
-var operations = Note.transformPatchToOperations(patch, contentLength)
-var authorship = note.authorship
+const patch = dmp.patch_fromText(revision.patch)
+const operations = Note.transformPatchToOperations(patch, contentLength)
+let authorship = note.authorship
 for (let i = 0; i < operations.length; i++) {
 authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
 }
@@ -238,7 +238,7 @@ module.exports = function (sequelize, DataTypes) {
 return callback(null, note.id)
 }
 } else {
-var filePath = path.join(config.docsPath, noteId + '.md')
+const filePath = path.join(config.docsPath, noteId + '.md')
 if (Note.checkFileExist(filePath)) {
 Note.create({
 alias: noteId,
@@ -270,7 +270,7 @@ module.exports = function (sequelize, DataTypes) {
 }
 // try to parse note id by LZString Base64
 try {
-var id = LZString.decompressFromBase64(noteId)
+const id = LZString.decompressFromBase64(noteId)
 if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
 } catch (err) {
 if (err.message === 'Cannot read property \'charAt\' of undefined') {
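
This branch covers ids that were LZString-compressed to base64; decompressing an arbitrary string can return null or garbage, which is why the result is re-validated with checkNoteIdValid. A minimal round trip with the same library (example UUID):

    const LZString = require('lz-string')

    const uuid = '2c2e4d7d-7a3b-4a4e-9d5c-1a2b3c4d5e6f'
    const legacyId = LZString.compressToBase64(uuid)

    console.log(LZString.decompressFromBase64(legacyId) === uuid)   // true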
@@ -284,7 +284,7 @@ module.exports = function (sequelize, DataTypes) {
 parseNoteIdByBase64Url: function (_callback) {
 // try to parse note id by base64url
 try {
-var id = Note.decodeNoteId(noteId)
+const id = Note.decodeNoteId(noteId)
 if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
 } catch (err) {
 logger.error(err)
@@ -321,24 +321,24 @@ module.exports = function (sequelize, DataTypes) {
 })
 }
 Note.parseNoteInfo = function (body) {
-var parsed = Note.extractMeta(body)
-var $ = cheerio.load(md.render(parsed.markdown))
+const parsed = Note.extractMeta(body)
+const $ = cheerio.load(md.render(parsed.markdown))
 return {
 title: Note.extractNoteTitle(parsed.meta, $),
 tags: Note.extractNoteTags(parsed.meta, $)
 }
 }
 Note.parseNoteTitle = function (body) {
-var parsed = Note.extractMeta(body)
-var $ = cheerio.load(md.render(parsed.markdown))
+const parsed = Note.extractMeta(body)
+const $ = cheerio.load(md.render(parsed.markdown))
 return Note.extractNoteTitle(parsed.meta, $)
 }
 Note.extractNoteTitle = function (meta, $) {
-var title = ''
+let title = ''
 if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
 title = meta.title
 } else {
-var h1s = $('h1')
+const h1s = $('h1')
 if (h1s.length > 0 && h1s.first().text().split('\n').length === 1) { title = S(h1s.first().text()).stripTags().s }
 }
 if (!title) title = 'Untitled'
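
parseNoteTitle/extractNoteTitle render the markdown and fall back to the first h1 when the front matter carries no usable title. A standalone sketch of that fallback path using only markdown-it and cheerio (without the `string` helper):

    const md = require('markdown-it')()
    const cheerio = require('cheerio')

    const body = '# Meeting notes\n\nsome content'

    const $ = cheerio.load(md.render(body))
    const h1s = $('h1')
    const title = h1s.length > 0 ? h1s.first().text() : 'Untitled'

    console.log(title)   // Meeting notes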
@@ -355,28 +355,28 @@ module.exports = function (sequelize, DataTypes) {
 return title
 }
 Note.extractNoteTags = function (meta, $) {
-var tags = []
-var rawtags = []
+const tags = []
+const rawtags = []
 if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
-var metaTags = ('' + meta.tags).split(',')
+const metaTags = ('' + meta.tags).split(',')
 for (let i = 0; i < metaTags.length; i++) {
-var text = metaTags[i].trim()
+const text = metaTags[i].trim()
 if (text) rawtags.push(text)
 }
 } else {
-var h6s = $('h6')
+const h6s = $('h6')
 h6s.each(function (key, value) {
 if (/^tags/gmi.test($(value).text())) {
-var codes = $(value).find('code')
+const codes = $(value).find('code')
 for (let i = 0; i < codes.length; i++) {
-var text = S($(codes[i]).text().trim()).stripTags().s
+const text = S($(codes[i]).text().trim()).stripTags().s
 if (text) rawtags.push(text)
 }
 }
 })
 }
 for (let i = 0; i < rawtags.length; i++) {
-var found = false
+let found = false
 for (let j = 0; j < tags.length; j++) {
 if (tags[j] === rawtags[i]) {
 found = true
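
extractNoteTags accepts either a comma-separated `tags:` entry in the front matter or h6 headings that start with `tags` and contain inline code, then removes duplicates with the found/push loop above. The front-matter path boils down to roughly this (sample input invented; a Set gives the same result as the nested loop):

    const metaTags = ('' + 'hedgedoc, notes , hedgedoc').split(',')

    const rawtags = []
    for (let i = 0; i < metaTags.length; i++) {
      const text = metaTags[i].trim()
      if (text) rawtags.push(text)
    }

    const tags = [...new Set(rawtags)]   // dedupe
    console.log(tags)                    // [ 'hedgedoc', 'notes' ]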
@@ -388,7 +388,7 @@ module.exports = function (sequelize, DataTypes) {
 return tags
 }
 Note.extractMeta = function (content) {
-var obj = null
+let obj = null
 try {
 obj = metaMarked(content)
 if (!obj.markdown) obj.markdown = ''
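
extractMeta delegates the front-matter split to meta-marked and then defaults the `markdown` field when it is missing. A quick sketch of what the parser returns, in the shape the surrounding code relies on (input string invented):

    const metaMarked = require('meta-marked')

    const note = '---\ntitle: Demo\ntags: a, b\n---\n# Heading\n'

    const parsed = metaMarked(note)
    console.log(parsed.meta)       // parsed YAML front matter, e.g. { title: 'Demo', tags: 'a, b' }
    console.log(parsed.markdown)   // the body below the front matter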
@@ -402,7 +402,7 @@ module.exports = function (sequelize, DataTypes) {
 return obj
 }
 Note.parseMeta = function (meta) {
-var _meta = {}
+const _meta = {}
 if (meta) {
 if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) { _meta.title = meta.title }
 if (meta.description && (typeof meta.description === 'string' || typeof meta.description === 'number')) { _meta.description = meta.description }
@@ -416,7 +416,7 @@ module.exports = function (sequelize, DataTypes) {
 return _meta
 }
 Note.parseOpengraph = function (meta, title) {
-var _ogdata = {}
+let _ogdata = {}
 if (meta.opengraph) { _ogdata = meta.opengraph }
 if (!(_ogdata.title && (typeof _ogdata.title === 'string' || typeof _ogdata.title === 'number'))) { _ogdata.title = title }
 if (!(_ogdata.description && (typeof _ogdata.description === 'string' || typeof _ogdata.description === 'number'))) { _ogdata.description = meta.description || '' }
@@ -424,27 +424,27 @@ module.exports = function (sequelize, DataTypes) {
 return _ogdata
 }
 Note.updateAuthorshipByOperation = function (operation, userId, authorships) {
-var index = 0
-var timestamp = Date.now()
+let index = 0
+const timestamp = Date.now()
 for (let i = 0; i < operation.length; i++) {
-var op = operation[i]
+const op = operation[i]
 if (ot.TextOperation.isRetain(op)) {
 index += op
 } else if (ot.TextOperation.isInsert(op)) {
-let opStart = index
-let opEnd = index + op.length
-var inserted = false
+const opStart = index
+const opEnd = index + op.length
+let inserted = false
 // authorship format: [userId, startPos, endPos, createdAt, updatedAt]
 if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
 else {
 for (let j = 0; j < authorships.length; j++) {
-let authorship = authorships[j]
+const authorship = authorships[j]
 if (!inserted) {
-let nextAuthorship = authorships[j + 1] || -1
+const nextAuthorship = authorships[j + 1] || -1
 if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
 if (authorship[1] < opStart && authorship[2] > opStart) {
 // divide
-let postLength = authorship[2] - opStart
+const postLength = authorship[2] - opStart
 authorship[2] = opStart
 authorship[4] = timestamp
 authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
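
As the comment in this hunk notes, authorship entries are flat tuples of `[userId, startPos, endPos, createdAt, updatedAt]`; an insert that lands inside another user's span divides that span and splices a new tuple in between. The shape, with invented values:

    const now = Date.now()
    const authorships = [
      ['user-a', 0, 12, now, now],   // user-a authored characters 0-12
      ['user-b', 12, 20, now, now]   // user-b authored characters 12-20
    ]
    console.log(authorships.length)  // 2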
@@ -470,13 +470,13 @@ module.exports = function (sequelize, DataTypes) {
 }
 index += op.length
 } else if (ot.TextOperation.isDelete(op)) {
-let opStart = index
-let opEnd = index - op
+const opStart = index
+const opEnd = index - op
 if (operation.length === 1) {
 authorships = []
 } else if (authorships.length > 0) {
 for (let j = 0; j < authorships.length; j++) {
-let authorship = authorships[j]
+const authorship = authorships[j]
 if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
 authorships.splice(j, 1)
 j -= 1
@@ -501,12 +501,12 @@ module.exports = function (sequelize, DataTypes) {
 }
 // merge
 for (let j = 0; j < authorships.length; j++) {
-let authorship = authorships[j]
+const authorship = authorships[j]
 for (let k = j + 1; k < authorships.length; k++) {
-let nextAuthorship = authorships[k]
+const nextAuthorship = authorships[k]
 if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
-let minTimestamp = Math.min(authorship[3], nextAuthorship[3])
-let maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
+const minTimestamp = Math.min(authorship[3], nextAuthorship[3])
+const maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
 authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
 authorships.splice(k, 1)
 j -= 1
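
The merge pass collapses two entries when they belong to the same user and the first ends exactly where the second begins, combining the two createdAt values with Math.min/Math.max. Worked on a tiny invented input, this is the tuple the splice builds:

    const a = ['user-a', 0, 5, 1000, 1500]
    const b = ['user-a', 5, 9, 2000, 2500]

    const merged = [a[0], a[1], b[2], Math.min(a[3], b[3]), Math.max(a[3], b[3])]
    console.log(merged)   // [ 'user-a', 0, 9, 1000, 2000 ]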
@@ -516,7 +516,7 @@ module.exports = function (sequelize, DataTypes) {
 }
 // clear
 for (let j = 0; j < authorships.length; j++) {
-let authorship = authorships[j]
+const authorship = authorships[j]
 if (!authorship[0]) {
 authorships.splice(j, 1)
 j -= 1
@@ -525,13 +525,13 @@ module.exports = function (sequelize, DataTypes) {
 return authorships
 }
 Note.transformPatchToOperations = function (patch, contentLength) {
-var operations = []
+const operations = []
 if (patch.length > 0) {
 // calculate original content length
 for (let j = patch.length - 1; j >= 0; j--) {
-var p = patch[j]
+const p = patch[j]
 for (let i = 0; i < p.diffs.length; i++) {
-var diff = p.diffs[i]
+const diff = p.diffs[i]
 switch (diff[0]) {
 case 1: // insert
 contentLength -= diff[1].length
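
The patch argument comes from diff-match-patch (`dmp.patch_fromText(revision.patch)` earlier in this diff); each patch object carries `start1`/`start2` offsets and a `diffs` array of `[op, text]` pairs, where 0 means retain, 1 insert and -1 delete, which is what the length back-calculation above walks over. A small sketch producing such a patch (strings invented):

    const DiffMatchPatch = require('diff-match-patch')
    const dmp = new DiffMatchPatch()

    const patch = dmp.patch_make('hello world', 'hello brave world')

    // something like: start1 = ..., diffs = [ [0, '...'], [1, 'brave '], [0, '...'] ]
    console.log(patch[0].start1, patch[0].diffs)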
@@ -543,15 +543,15 @@ module.exports = function (sequelize, DataTypes) {
 }
 }
 // generate operations
-var bias = 0
-var lengthBias = 0
+let bias = 0
+let lengthBias = 0
 for (let j = 0; j < patch.length; j++) {
-var operation = []
-let p = patch[j]
-var currIndex = p.start1
-var currLength = contentLength - bias
+const operation = []
+const p = patch[j]
+let currIndex = p.start1
+const currLength = contentLength - bias
 for (let i = 0; i < p.diffs.length; i++) {
-let diff = p.diffs[i]
+const diff = p.diffs[i]
 switch (diff[0]) {
 case 0: // retain
 if (i === 0) {