Removed deduplication of tokens
@@ -39,7 +39,10 @@ export class Tokenizer {
 }
 
 // Remove duplicates
-tokens = [...new Set(tokens)]
+// tokens = [...new Set(tokens)]
+
+// Remove empty tokens
+tokens = tokens.filter(Boolean)
 
 return tokens
 } catch (e) {
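For orientation, here is a minimal sketch of how the surrounding method might read after this commit. Only the class name `Tokenizer` and the lines visible in the hunk come from the diff; the method name `tokenize`, the whitespace split, and the catch body are assumptions for illustration, not the repository's actual code.

export class Tokenizer {
  tokenize(input: string): string[] {
    try {
      // Hypothetical splitting step; the real one sits outside the hunk.
      let tokens = input.split(/\s+/)

      // Remove duplicates
      // tokens = [...new Set(tokens)]  // disabled by this commit; duplicates are now kept

      // Remove empty tokens (the split can produce empty strings at the edges)
      tokens = tokens.filter(Boolean)

      return tokens
    } catch (e) {
      throw e  // placeholder; the original catch body is not shown in the hunk
    }
  }
}

Commenting the deduplication line out rather than deleting it hints that it may be restored later; in the meantime, callers see every occurrence of a token, which preserves frequency information that `new Set(...)` would have discarded.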