Elasticsearch: how to add a new field and populate it with a value from an existing field

I have an elastic search index as follows:

 {
  "payment_transaction": {
    "mappings": {
      "message_logs": {
        "properties": {
          "@timestamp": {
            "type": "date"
          },
          "changed_date": {
            "type": "date"
          },
          "created_date": {
            "type": "date"
          }
        }
      }
    }
  }
}

And I need to add three more fields (year, month, day), assigning their values from an existing field (created_date). The format of created_date is 2016-11-22T22:20:21.000Z. How can I do this? The Elasticsearch version is 5.0.

+4
source share
1 answer

You can use `copy_to` on the `created_date` field to copy its value into the new fields, and then extract the year, month, and day with a `pattern_replace` char filter, like this:

Example:

put test 
{
    "settings": {
        "analysis": {
            "char_filter": {
                "year" : {
                    "type": "pattern_replace",
                    "pattern": "(\\d{4})-(\\d{2})-(\\d{2})T\\d{2}:\\d{2}:\\d{2}.\\d{3}Z",
                    "replacement": "$1"
                },
                "month" : {
                    "type": "pattern_replace",
                    "pattern": "(\\d{4})-(\\d{2})-(\\d{2})T\\d{2}:\\d{2}:\\d{2}.\\d{3}Z",
                    "replacement": "$2"
                },
                "day" : {
                    "type": "pattern_replace",
                       "pattern": "(\\d{4})-(\\d{2})-(\\d{2})T\\d{2}:\\d{2}:\\d{2}.\\d{3}Z",
                    "replacement": "$3"
                }
            },
            "analyzer": {
                "year" : {

                    "tokenizer" : "keyword",
                    "char_filter" : ["year"]
                },
                "month" : {

                    "tokenizer" : "keyword",
                    "char_filter" : ["month"]
                },
                "day" : {

                    "tokenizer" : "keyword",
                    "char_filter" : ["day"]
                }

            }
        }
    }
}
put test/message_logs/_mapping
{


      "message_logs": {
        "properties": {
          "@timestamp": {
            "type": "date"
          },
          "changed_date": {
            "type": "date"
          },
          "created_date": {
            "type": "date",

            "copy_to" : ["year","month","day"]
          },
           "year": {
            "type": "text",
            "analyzer" : "year",
            "search_analyzer":"keyword",
            "store" : true,
            "fielddata":true


          },
          "month": {
                 "type": "text",
            "analyzer" : "month",
            "search_analyzer":"keyword",
            "store" : true,
            "fielddata":true


          },
          "day": {
               "type": "text",
            "analyzer" : "day",
            "search_analyzer":"keyword",
            "store" : true,
            "fielddata":true


          }
        }
      }


}

put test/message_logs/1 
{
    "created_date" : "2016-11-22T22:20:21.000Z"
}

post test/message_logs/_search
{
    "fielddata_fields": [
       "year",
       "month",
       "day"
    ]
}

Result:

    {
   "took": 3,
   "timed_out": false,
   "_shards": {
      "total": 5,
      "successful": 5,
      "failed": 0
   },
   "hits": {
      "total": 1,
      "max_score": 1,
      "hits": [
         {
            "_index": "test",
            "_type": "message_logs",
            "_id": "1",
            "_score": 1,
            "_source": {
               "created_date": "2016-11-22T22:20:21.000Z"
            },
            "fields": {
               "month": [
                  "11"
               ],
               "year": [
                  "2016"
               ],
               "day": [
                  "22"
               ]
            }
         }
      ]
   }
}

Note that `fielddata` is set to `true` in the mapping so that the derived values can be returned via `fielddata_fields` in the search request.

+2

Source: https://habr.com/ru/post/1667620/


All Articles