Align the language configuration with the official one; change the search token generation mechanism
This commit is contained in:
parent
0f12973881
commit
51e6fe7952
1 changed file with 2 additions and 1 deletion
@@ -15,6 +15,7 @@
     encode: false,
     tokenize: function(str) {
-      return str.replace(/[\x00-\x7F]/g, '').split('');
+      // Only strip whitespace; keep punctuation, emoji, digits, and English characters
+      return str.replace(/\n*\s*\r*/g, '').split('');
     }
 }
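For reference, the previous regex, /[\x00-\x7F]/g, stripped every ASCII character before splitting, so English text, digits, and ASCII punctuation were discarded from the search tokens; the new regex removes only whitespace and then splits the remainder into single characters. The sketch below demonstrates that difference on a sample string. The standalone tokenize wrapper and the sample input are illustrative assumptions, not part of the commit; only the two regexes come from the diff.

// Illustrative sketch: the function wrapper and sample input are assumptions
// made for demonstration; the regexes are taken from the diff above.
function tokenize(str) {
  // Remove whitespace only (newlines, spaces, carriage returns), then split
  // the remainder into single characters (UTF-16 code units).
  return str.replace(/\n*\s*\r*/g, '').split('');
}

console.log(tokenize('Hello, 世界 123'));
// -> [ 'H', 'e', 'l', 'l', 'o', ',', '世', '界', '1', '2', '3' ]

// The previous regex stripped all ASCII, so the same input kept only the CJK characters:
console.log('Hello, 世界 123'.replace(/[\x00-\x7F]/g, '').split(''));
// -> [ '世', '界' ]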