// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Code generated from the elasticsearch-specification DO NOT EDIT.
// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757

package types

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// KeywordTokenizer type.
//
// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L62-L65
type KeywordTokenizer struct {
	BufferSize int     `json:"buffer_size"`
	Type       string  `json:"type,omitempty"`
	Version    *string `json:"version,omitempty"`
}
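
// Illustrative note (not part of the generated code): in index analysis
// settings this struct corresponds to a tokenizer definition of the form
//
//	{ "type": "keyword", "buffer_size": 256 }
//
// where 256 is only an example value for buffer_size.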

// UnmarshalJSON decodes a KeywordTokenizer, accepting buffer_size either as a
// JSON number or as a numeric string.
func (s *KeywordTokenizer) UnmarshalJSON(data []byte) error {

	dec := json.NewDecoder(bytes.NewReader(data))

	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}

		switch t {

		case "buffer_size":
			// buffer_size may arrive as a string ("256") or a number (256).
			var tmp interface{}
			dec.Decode(&tmp)
			switch v := tmp.(type) {
			case string:
				value, err := strconv.Atoi(v)
				if err != nil {
					return fmt.Errorf("%s | %w", "BufferSize", err)
				}
				s.BufferSize = value
			case float64:
				f := int(v)
				s.BufferSize = f
			}

		case "type":
			if err := dec.Decode(&s.Type); err != nil {
				return fmt.Errorf("%s | %w", "Type", err)
			}

		case "version":
			if err := dec.Decode(&s.Version); err != nil {
				return fmt.Errorf("%s | %w", "Version", err)
			}

		}
	}
	return nil
}
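
// exampleKeywordTokenizerUnmarshal is an illustrative sketch, not part of the
// generated API: it shows that UnmarshalJSON accepts buffer_size either as a
// JSON number or as a numeric string.
func exampleKeywordTokenizerUnmarshal() {
	var tok KeywordTokenizer
	// A numeric string is converted with strconv.Atoi inside UnmarshalJSON.
	if err := json.Unmarshal([]byte(`{"type":"keyword","buffer_size":"512"}`), &tok); err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(tok.BufferSize) // 512
}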

// MarshalJSON overrides marshalling so the literal "keyword" type value is
// always included in the output.
func (s KeywordTokenizer) MarshalJSON() ([]byte, error) {
	// innerKeywordTokenizer has the same fields but not the custom
	// MarshalJSON method, which avoids infinite recursion below.
	type innerKeywordTokenizer KeywordTokenizer
	tmp := innerKeywordTokenizer{
		BufferSize: s.BufferSize,
		Type:       s.Type,
		Version:    s.Version,
	}

	tmp.Type = "keyword"

	return json.Marshal(tmp)
}

// NewKeywordTokenizer returns a KeywordTokenizer.
func NewKeywordTokenizer() *KeywordTokenizer {
	r := &KeywordTokenizer{}

	return r
}
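
// exampleKeywordTokenizerMarshal is an illustrative sketch, not part of the
// generated API: it shows that NewKeywordTokenizer plus MarshalJSON always
// emit the literal "type":"keyword" discriminator, even when Type is unset.
func exampleKeywordTokenizerMarshal() {
	tok := NewKeywordTokenizer()
	tok.BufferSize = 256

	out, err := json.Marshal(tok)
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(string(out)) // {"buffer_size":256,"type":"keyword"}
}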