@@ -153,17 +153,46 @@ function docTokenSplitter(builder) {
 }
 
 function docTokenFunction(token) {
-  // Split on : . / _ - to make it easier to partially match on function names.
-  // We split only when tokenizing, not when searching.
-  const tokens = token
-    .toString()
-    .split(/\:|\.|\/|_|-/)
-    .map(part => {
-      return token.clone().update(() => part)
-    })
+  // If we have something with an arity, we split on : . to make partial
+  // matches easier. We split only when tokenizing, not when searching.
+  // Below we use ExDoc.Markdown.to_ast/2 as an example.
+  const tokens = [token]
+  const arityRegex = /\/\d+$/
+  const namespaceRegex = /\:|\./
+  let toSplitWords = token.toString()
+
+  if (arityRegex.test(toSplitWords)) {
+    const withoutArity = token
+      .toString()
+      .replace(arityRegex, "")
+
+    // This token represents ExDoc.Markdown.to_ast
+    tokens.push(token.clone().update(() => withoutArity))
+
+    // And now we get each part as a token: ExDoc, Markdown, and to_ast
+    let parts = withoutArity.split(namespaceRegex)
+
+    if (parts.length > 1) {
+      for (let part of parts) {
+        tokens.push(token.clone().update(() => part))
+      }
+
+      // Let's also add to_ast/2
+      let lastWithArity = token.toString().split(namespaceRegex)
+      tokens.push(token.clone().update(() => lastWithArity[lastWithArity.length - 1]))
+    }
 
-  if (tokens.length > 1) {
-    return [...tokens, token]
+    toSplitWords = parts[parts.length - 1]
+  }
+
+  // Now split the function name (or the token, if that's all we had),
+  // on _ or - (but we keep the original)
+  let words = toSplitWords.split(/\_|\-/)
+
+  if (words.length > 1) {
+    for (let word of words) {
+      tokens.push(token.clone().update(() => word))
+    }
   }
 
   return tokens
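
For reference, a minimal sketch of what the new docTokenFunction yields for the example named in its own comments. It assumes the tokens come from lunr.js, whose Token API (toString, clone, update) matches the calls above; the require and the direct Token construction below are illustrative assumptions, not part of the commit:

const lunr = require("lunr")

// Hypothetical driver, not part of the diff: wrap the example name in a
// lunr.Token and run it through the new pipeline function directly.
const token = new lunr.Token("ExDoc.Markdown.to_ast/2", {})
const names = docTokenFunction(token).map(t => t.toString())

// Tracing the branches above, names should be:
// ["ExDoc.Markdown.to_ast/2", "ExDoc.Markdown.to_ast",
//  "ExDoc", "Markdown", "to_ast", "to_ast/2", "to", "ast"]

The original token stays first in the list, so exact queries keep matching; the extra fragments are what let partial queries such as to_ast, to_ast/2, or Markdown hit the same entry.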