Create a New Analyzer and Set Up a Tokenizer for It
curl -X PUT "http://localhost:9200/gaugau" -H 'Content-Type: application/json' -d'
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer_3_50": {
          "tokenizer": "my_tokenizer_3_50",
          "filter": [
            "lowercase"
          ]
        }
      },
      "tokenizer": {
        "my_tokenizer_3_50": {
          "type": "ngram",
          "min_gram": 3,
          "max_gram": 50,
          "token_chars": [
            "letter",
            "digit"
          ]
        }
      }
    }
  },
  "mappings": {
    "_doc": {
      "properties": {
        "first_name": {
          "type": "text",
          "analyzer": "my_analyzer_3_50"
        }
      }
    }
  }
}
'
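One thing to watch: with min_gram 3 and max_gram 50 the gram difference is 47, and depending on the Elasticsearch version such a wide ngram range is rejected (or warned about) unless index.max_ngram_diff is raised in the index settings, so adjust that setting if the PUT request fails. Once the index is created, the analyzer can be verified with the _analyze API. A minimal check, where the sample text "GauGau123" is only an illustration:

curl -X POST "http://localhost:9200/gaugau/_analyze" -H 'Content-Type: application/json' -d'
{
  "analyzer": "my_analyzer_3_50",
  "text": "GauGau123"
}
'

The response lists every lowercased ngram of length 3 and up produced from the letters and digits, which confirms the analyzer and tokenizer are wired together as intended.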