Removed deduplication of tokens

Simon Cambier
2025-07-22 21:19:21 +02:00
parent c25cc8e62b
commit 1fa6898bbe


@@ -39,7 +39,10 @@ export class Tokenizer {
       }
       // Remove duplicates
-      tokens = [...new Set(tokens)]
+      // tokens = [...new Set(tokens)]
+      // Remove empty tokens
+      tokens = tokens.filter(Boolean)
       return tokens
     } catch (e) {
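
For context, a minimal sketch of what this change means for the returned array, using hypothetical token values rather than real Omnisearch output: with the Set spread commented out, duplicate tokens are kept, and the new filter(Boolean) drops empty strings.

// Minimal sketch with hypothetical token values (not from the actual Tokenizer)
const tokens: string[] = ['note', '', 'search', 'note', '']

// Before this commit: duplicates were removed, empty strings were kept
const deduplicated = [...new Set(tokens)]
console.log(deduplicated) // ['note', '', 'search']

// After this commit: duplicates are kept, empty strings are removed
const filtered = tokens.filter(Boolean)
console.log(filtered) // ['note', 'search', 'note']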