embed.js
import {get} from '../fetch/ajax'
import {merge} from '../util/core'
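
// Cache of lexed token arrays, keyed by the raw markdown that produced them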
const cached = {}
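
// Resolves every embed token: fetches remote content, compiles it into marked
// tokens, and calls `cb` once per embed with a {token, embedToken} pair, then
// once more with an empty object when all embeds have been handled.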
function walkFetchEmbed({embedTokens, compile, fetch}, cb) {
  let token
  let step = 0
  let count = 1

  if (!embedTokens.length) {
    return cb({})
  }
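
  // The IIFE pins the current token so each async response maps back to its own embed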
  while ((token = embedTokens[step++])) {
    const next = (function (token) {
      return text => {
        let embedToken
        if (text) {
          if (token.embed.type === 'markdown') {
            embedToken = compile.lexer(text)
          } else if (token.embed.type === 'code') {
            if (token.embed.fragment) {
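              // Keep only the region between matching `### [name]` (or `/// [name]`) markers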
              const fragment = token.embed.fragment
              const pattern = new RegExp(`(?:###|\\/\\/\\/)\\s*\\[${fragment}\\]([\\s\\S]*)(?:###|\\/\\/\\/)\\s*\\[${fragment}\\]`)
              text = ((text.match(pattern) || [])[1] || '').trim()
            }
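            // Escape backticks so embedded code cannot close the generated fence;
            // the render pipeline restores @DOCSIFY_QM@ to ` afterwards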
            embedToken = compile.lexer(
              '```' +
              token.embed.lang +
              '\n' +
              text.replace(/`/g, '@DOCSIFY_QM@') +
              '\n```\n'
            )
          } else if (token.embed.type === 'mermaid') {
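            // Emit a raw HTML token wrapped in the .mermaid div the mermaid renderer targets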
            embedToken = [
              {type: 'html', text: `<div class="mermaid">\n${text}\n</div>`}
            ]
            embedToken.links = {}
          } else {
            embedToken = [{type: 'html', text}]
            embedToken.links = {}
          }
        }
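
        // Report this embed; the final empty callback fires once every response
        // has arrived (the counting assumes the loop finished queueing first)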
        cb({token, embedToken})
        if (++count >= step) {
          cb({})
        }
      }
    })(token)
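
    // Remote embeds resolve through the caller's fetch on the server (SSR)
    // or docsify's get helper in the browser; inline HTML resolves synchronously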
    if (token.embed.url) {
      if (process.env.SSR) {
        fetch(token.embed.url).then(next)
      } else {
        get(token.embed.url).then(next)
      }
    } else {
      next(token.embed.html)
    }
  }
}
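
// Lexes `raw`, resolves every embed link it contains, and hands the final
// token list to `done`. Token arrays are cached per raw string.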
export function prerenderEmbed({compiler, raw = '', fetch}, done) {
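  // Serve a shallow copy on a cache hit so callers can mutate it safely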
  let hit = cached[raw]
  if (hit) {
    const copy = hit.slice()
    copy.links = hit.links
    return done(copy)
  }
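
  // Scan paragraph tokens for links; compiler.compileEmbed decides whether a
  // link's title marks it as an embed (docsify's `:include` syntax)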
  const compile = compiler._marked
  let tokens = compile.lexer(raw)
  const embedTokens = []
  const linkRE = compile.InlineLexer.rules.link
  const links = tokens.links

  tokens.forEach((token, index) => {
    if (token.type === 'paragraph') {
      token.text = token.text.replace(
        new RegExp(linkRE.source, 'g'),
        (src, filename, href, title) => {
          const embed = compiler.compileEmbed(href, title)
          if (embed) {
            embedTokens.push({
              index,
              embed
            })
          }
          return src
        }
      )
    }
  })
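
  // Each embed replaces one paragraph token with several; moveIndex tracks
  // how far later indexes have shifted as tokens are spliced in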
  let moveIndex = 0
  walkFetchEmbed({compile, embedTokens, fetch}, ({embedToken, token}) => {
    if (token) {
      const index = token.index + moveIndex
      merge(links, embedToken.links)
      tokens = tokens
        .slice(0, index)
        .concat(embedToken, tokens.slice(index + 1))
      moveIndex += embedToken.length - 1
    } else {
      cached[raw] = tokens.concat()
      tokens.links = cached[raw].links = links
      done(tokens)
    }
  })
}
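
// Usage sketch (hypothetical caller, not part of this module): docsify's
// renderer would pass its marked-based compiler and render the tokens it
// gets back, e.g.:
//
//   prerenderEmbed({compiler, raw: markdown}, tokens => {
//     const html = compiler._marked.parser(tokens)
//   })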