fix: tentative workaround for #383
@@ -15,6 +15,7 @@ export class Tokenizer {
    * @returns
    */
   public tokenizeForIndexing(text: string): string[] {
+    try {
     const words = this.tokenizeWords(text)
     let urls: string[] = []
     if (this.plugin.settings.tokenizeUrls) {
@@ -45,6 +46,10 @@ export class Tokenizer {
     tokens = [...new Set(tokens)]
 
     return tokens
+    } catch (e) {
+      console.error('Error tokenizing text, skipping document', e)
+      return []
+    }
   }
 
   /**
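
The workaround wraps the whole tokenization path in a try/catch: if tokenizing a document throws, the error is logged and the document is indexed with an empty token list instead of aborting indexing. A minimal standalone sketch of that pattern in TypeScript (the helper name and parameters below are illustrative, not part of the plugin's API):

// Sketch of the pattern the commit applies: treat a per-document tokenization
// failure as "skip this document" rather than letting the error propagate.
function tokenizeSafely(tokenize: (text: string) => string[], text: string): string[] {
  try {
    // Deduplicate tokens, mirroring `tokens = [...new Set(tokens)]` in the diff.
    return [...new Set(tokenize(text))]
  } catch (e) {
    console.error('Error tokenizing text, skipping document', e)
    return []
  }
}

// Example usage: a crude whitespace tokenizer stands in for the plugin's real tokenizer.
const tokens = tokenizeSafely(text => text.split(/\s+/), 'some note content')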