
I am new to Elasticsearch. I am trying to create an index with the mapping below, which I found online, using Kibana as my client, but it throws the error shown after the request.

PUT /local_test
    {
      "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
          "analyzer": {
            "edge_ngram_analyzer": {
              "type": "custom",
              "tokenizer": "edge_ngram_tokenizer",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "standard_custom": {
              "type": "custom",
              "char_filter": [
                "punctuation_remap"
              ],
              "tokenizer": "standard",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "lowercase_keyword": {
              "type": "custom",
              "tokenizer": "keyword",
              "filter": [
                "lowercase"
              ]
            }
          },
          "tokenizer": {
            "edge_ngram_tokenizer": {
              "type": "edge_ngram",
              "min_gram": 2,
              "max_gram": 50,
              "token_chars": [
                "letter",
                "digit"
              ]
            }
          },
          "filter": {
            "en_stopwords": {
              "type": "stop",
              "stopwords": "_english_"
            }
          },
          "char_filter": {
            "punctuation_remap": {
              "type": "mapping",
              "mappings": [
                ". => -",
                ": => -",
                "' => -"
              ]
            }
          }
        }
      },
      "mappings": {
        "local_test": {
          "_all": {
            "enabled": false
          },
          "properties": {
            "id": {
              "type": "keyword"
            },
            "user_id": {
              "type": "keyword"
            },
            "created_at": {
              "type": "date",
              "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
              "type": "object",
              "dynamic": false,
              "enabled": true,
              "properties": {
                "key": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                },
                "value": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                }
              }
            },
            "recurring": {
              "type": "boolean"
            },
            "amount": {
              "type": "long"
            }
          }
        }
      }
    }

"type" : "mapper_parsing_exception", "reason" : "Root mapping definition has unsupported parameters: [local_test : {_all={enabled=false}, properties={amount={type=long}, user_id={type=keyword}, recurring={type=boolean}, created_at={format=yyyy-MM-dd HH:mm:ss||epoch_millis, type=date}, id={type=keyword}, docvalue={dynamic=false, type=object, enabled=true, properties={value={analyzer=lowercase_keyword, type=text}, key={analyzer=lowercase_keyword, type=text}}}}}]"


1 Answer


There are two issues in your request. I am assuming you are using the latest major version, i.e. 7.x:

  1. Remove `_all`, which was removed in the latest version; see the official blog post on this change. (If you still need `_all`-style search, a `copy_to` sketch follows this list.)
  2. Remove your type `local_test`, as mapping types are also removed in the latest version; see the removal-of-types documentation for more info.
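
If you were relying on `_all` as a catch-all search field, the usual 7.x replacement is `copy_to` into an ordinary text field. A minimal sketch (the `all_text` field name is only an illustration, not something from your mapping):

    "properties": {
        "all_text": {
            "type": "text"
        },
        "user_id": {
            "type": "keyword",
            "copy_to": "all_text"
        }
    }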

With those changes, the request below works fine:

PUT /local_test

{
    "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
            "analyzer": {
                "edge_ngram_analyzer": {
                    "type": "custom",
                    "tokenizer": "edge_ngram_tokenizer",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "standard_custom": {
                    "type": "custom",
                    "char_filter": [
                        "punctuation_remap"
                    ],
                    "tokenizer": "standard",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "lowercase_keyword": {
                    "type": "custom",
                    "tokenizer": "keyword",
                    "filter": [
                        "lowercase"
                    ]
                }
            },
            "tokenizer": {
                "edge_ngram_tokenizer": {
                    "type": "edge_ngram",
                    "min_gram": 2,
                    "max_gram": 50,
                    "token_chars": [
                        "letter",
                        "digit"
                    ]
                }
            },
            "filter": {
                "en_stopwords": {
                    "type": "stop",
                    "stopwords": "_english_"
                }
            },
            "char_filter": {
                "punctuation_remap": {
                    "type": "mapping",
                    "mappings": [
                        ". => -",
                        ": => -",
                        "' => -"
                    ]
                }
            }
        }
    },
    "mappings": {
        "properties": {
            "id": {
                "type": "keyword"
            },
            "user_id": {
                "type": "keyword"
            },
            "created_at": {
                "type": "date",
                "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
                "type": "object",
                "dynamic": false,
                "enabled": true,
                "properties": {
                    "key": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    },
                    "value": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    }
                }
            },
            "recurring": {
                "type": "boolean"
            },
            "amount": {
                "type": "long"
            }
        }
    }
}

Output:

{
    "acknowledged": true,
    "shards_acknowledged": true,
    "index": "local_test"
}
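
Once the index exists, note that the document-level APIs are also typeless in 7.x, so you index into the generic `_doc` endpoint instead of a custom type. As a quick sanity check (the sample document below is hypothetical; its values are only meant to match the mapping above), you can index one document and then inspect the stored mapping:

POST /local_test/_doc
{
    "id": "1",
    "user_id": "42",
    "created_at": "2020-01-15 10:30:00",
    "docvalue": {
        "key": "category",
        "value": "utilities"
    },
    "recurring": true,
    "amount": 1200
}

GET /local_test/_mapping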