Mirror of https://github.com/yingziwu/mastodon.git, synced 2026-02-15 14:53:17 +00:00
change elasticsearch tokenizer to support chinese full-text search

parent e3750a503e
commit 96b4da8b39

3 changed files with 3 additions and 3 deletions
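The 'ik_max_word' tokenizer is not built into Elasticsearch; it is provided by the elasticsearch-analysis-ik plugin, which has to be installed on every Elasticsearch node (version-matched to the server) before these settings can be applied. A minimal sanity-check sketch from the Rails console, assuming a stock Mastodon setup where Chewy.client returns the underlying Elasticsearch client; the sample text and token output are illustrative:

    # Sketch: confirm the ik plugin is installed and 'ik_max_word' resolves.
    # The _analyze API accepts an ad-hoc tokenizer without naming any index.
    resp = Chewy.client.indices.analyze(
      body: { tokenizer: 'ik_max_word', text: '中华人民共和国' }
    )
    puts resp['tokens'].map { |t| t['token'] }.join(' | ')
    # ik_max_word emits overlapping segments at every granularity, e.g.:
    # 中华人民共和国 | 中华人民 | 中华 | 华人 | 人民共和国 | 人民 | 共和国 | 共和 | 国

If the plugin is missing, the call fails with an unknown-tokenizer error rather than returning tokens.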
app/chewy/accounts_index.rb

@@ -4,7 +4,7 @@ class AccountsIndex < Chewy::Index
   settings index: { refresh_interval: '5m' }, analysis: {
     analyzer: {
       content: {
-        tokenizer: 'whitespace',
+        tokenizer: 'ik_max_word',
         filter: %w(lowercase asciifolding cjk_width),
       },
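Why this matters for accounts: the old 'whitespace' tokenizer only splits on spaces, and CJK text normally contains none, so an entire Chinese display name or bio was indexed as a single token that individual words could never match. A comparison sketch (the ik segmentation shown is an assumption; the actual split depends on the ik dictionary in use):

    # Sketch: contrast the old and new tokenizers on spaceless CJK input.
    %w(whitespace ik_max_word).each do |tok|
      resp = Chewy.client.indices.analyze(body: { tokenizer: tok, text: '你好世界' })
      puts "#{tok}: #{resp['tokens'].map { |t| t['token'] }.inspect}"
    end
    # whitespace:  ["你好世界"]    <- one opaque token, unsearchable by word
    # ik_max_word: ["你好", "世界"]  (assumed; depends on the ik dictionary)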
app/chewy/statuses_index.rb

@@ -18,7 +18,7 @@ class StatusesIndex < Chewy::Index
     },
     analyzer: {
       content: {
-        tokenizer: 'uax_url_email',
+        tokenizer: 'ik_max_word',
         filter: %w(
           english_possessive_stemmer
           lowercase
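Note that the rest of the 'content' analyzer for statuses is untouched: the English stemming and stop filters simply pass CJK tokens through, so mixed English-Chinese posts get both behaviors. To exercise the full analyzer chain rather than the bare tokenizer, the _analyze call can target the index once it has been recreated with the new settings; this sketch assumes Chewy's StatusesIndex.index_name accessor for the concrete index name:

    # Sketch: run the complete 'content' analyzer against mixed-language text.
    # Requires the statuses index to already exist with these settings.
    resp = Chewy.client.indices.analyze(
      index: StatusesIndex.index_name,
      body: { analyzer: 'content', text: 'Mastodon 支持全文搜索' }
    )
    puts resp['tokens'].map { |t| t['token'] }.inspect
    # English terms are lowercased/stemmed as before; CJK terms come out
    # word-segmented and flow through the English filters unchanged.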
app/chewy/tags_index.rb

@@ -4,7 +4,7 @@ class TagsIndex < Chewy::Index
   settings index: { refresh_interval: '15m' }, analysis: {
     analyzer: {
       content: {
-        tokenizer: 'keyword',
+        tokenizer: 'ik_max_word',
         filter: %w(lowercase asciifolding cjk_width),
       },
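For hashtags the change is the most drastic: the old 'keyword' tokenizer emitted the whole tag as a single token, so Chinese tags could only ever match exactly. Also note that analysis settings are fixed at index creation time, so none of the three changes take effect until the indexes are rebuilt. A rebuild sketch using Chewy's standard reset! (recreate with current settings, then reimport):

    # Sketch: recreate each index with the new analysis settings and refill it.
    # On a large instance the reimport can take a long time.
    [AccountsIndex, TagsIndex, StatusesIndex].each(&:reset!)

    # Mastodon's CLI wraps the same rebuild:
    #   bin/tootctl search deploy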