Skip to content

Commit 8bd43bb

Browse files
committed
Improve tokenization of function names, closes #1930
1 parent 56a3efa commit 8bd43bb

File tree

2 files changed

+40
-11
lines changed

2 files changed

+40
-11
lines changed

assets/js/search-page.js

+39-10
Original file line numberDiff line numberDiff line change
@@ -153,17 +153,46 @@ function docTokenSplitter (builder) {
153153
}
154154

155155
function docTokenFunction (token) {
156-
// Split on : . / _ - to make easier to partially match on function names.
157-
// We split only when tokenizing, not when searching.
158-
const tokens = token
159-
.toString()
160-
.split(/\:|\.|\/|_|-/)
161-
.map(part => {
162-
return token.clone().update(() => part)
163-
})
156+
// If we have something with an arity, we split on : . to make partial
157+
// matches easier. We split only when tokenizing, not when searching.
158+
// Below we use ExDoc.Markdown.to_ast/2 as an example.
159+
const tokens = [token]
160+
const arityRegex = /\/\d+$/
161+
const namespaceRegex = /\:|\./
162+
let toSplitWords = token.toString()
163+
164+
if(arityRegex.test(toSplitWords)) {
165+
const withoutArity = token
166+
.toString()
167+
.replace(arityRegex, "")
168+
169+
// This token represents ExDoc.Markdown.to_ast
170+
tokens.push(token.clone().update(() => withoutArity))
171+
172+
// And now we get each part as token: ExDoc, Markdown, and to_ast
173+
let parts = withoutArity.split(namespaceRegex)
174+
175+
if(parts.length > 1) {
176+
for(let part of parts) {
177+
tokens.push(token.clone().update(() => part))
178+
}
179+
180+
// Let's also add to_ast/2
181+
let lastWithArity = token.toString().split(namespaceRegex)
182+
tokens.push(token.clone().update(() => lastWithArity[lastWithArity.length - 1]))
183+
}
164184

165-
if (tokens.length > 1) {
166-
return [...tokens, token]
185+
toSplitWords = parts[parts.length - 1]
186+
}
187+
188+
// Now split the function name (or the token, if that's all we had),
189+
// on _ or - (but we keep the original)
190+
let words = toSplitWords.split(/\_|\-/)
191+
192+
if(words.length > 1) {
193+
for(let word of words) {
194+
tokens.push(token.clone().update(() => word))
195+
}
167196
}
168197

169198
return tokens

formatters/html/dist/html-7XSQ4GTY.js → formatters/html/dist/html-5OPRASEH.js

+1-1
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)