forked from elastic/go-elasticsearch
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbucketsummary.go
200 lines (178 loc) · 5.88 KB
/
bucketsummary.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// Code generated from the elasticsearch-specification DO NOT EDIT.
// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
package types
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
)
// BucketSummary type.
//
// BucketSummary describes one ML anomaly-detection result bucket: its time
// span, overall anomaly scoring, and processing metadata. Instances are
// produced by decoding Elasticsearch ML results (see UnmarshalJSON below).
//
// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Bucket.ts#L31-L78
type BucketSummary struct {
	// AnomalyScore The maximum anomaly score, between 0-100, for any of the bucket influencers.
	// This is an overall, rate-limited
	// score for the job. All the anomaly records in the bucket contribute to this
	// score. This value might be updated as
	// new data is analyzed.
	AnomalyScore Float64 `json:"anomaly_score"`
	// BucketInfluencers The set of influencers contributing to this bucket's score.
	BucketInfluencers []BucketInfluencer `json:"bucket_influencers"`
	// BucketSpan The length of the bucket in seconds. This value matches the bucket span that
	// is specified in the job.
	BucketSpan int64 `json:"bucket_span"`
	// EventCount The number of input data records processed in this bucket.
	EventCount int64 `json:"event_count"`
	// InitialAnomalyScore The maximum anomaly score for any of the bucket influencers. This is the
	// initial value that was calculated at the
	// time the bucket was processed.
	InitialAnomalyScore Float64 `json:"initial_anomaly_score"`
	// IsInterim If true, this is an interim result. In other words, the results are
	// calculated based on partial input data.
	IsInterim bool `json:"is_interim"`
	// JobId Identifier for the anomaly detection job.
	JobId string `json:"job_id"`
	// ProcessingTimeMs The amount of time, in milliseconds, that it took to analyze the bucket
	// contents and calculate results.
	ProcessingTimeMs int64 `json:"processing_time_ms"`
	// ResultType Internal. This value is always set to bucket.
	ResultType string `json:"result_type"`
	// Timestamp The start time of the bucket. This timestamp uniquely identifies the bucket.
	// Events that occur exactly at the
	// timestamp of the bucket are included in the results for the bucket.
	Timestamp int64 `json:"timestamp"`
	// TimestampString The start time of the bucket. This timestamp uniquely identifies the bucket.
	// Events that occur exactly at the
	// timestamp of the bucket are included in the results for the bucket.
	TimestampString DateTime `json:"timestamp_string,omitempty"`
}
// UnmarshalJSON decodes JSON data into s.
//
// It walks the object token-by-token rather than using a plain struct decode
// because several fields (scores, counts, is_interim) may be serialized either
// as their native JSON type or as a string; each such field is decoded into an
// interface{} first and converted according to its dynamic type.
//
// Fix over the generated form: the inner dec.Decode(&tmp) calls previously
// discarded their error, which could silently desynchronize the decoder and
// leave fields zero-valued; those errors are now returned to the caller.
func (s *BucketSummary) UnmarshalJSON(data []byte) error {

	dec := json.NewDecoder(bytes.NewReader(data))

	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}

		switch t {

		case "anomaly_score":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "AnomalyScore", err)
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseFloat(v, 64)
				if err != nil {
					return fmt.Errorf("%s | %w", "AnomalyScore", err)
				}
				s.AnomalyScore = Float64(value)
			case float64:
				s.AnomalyScore = Float64(v)
			}

		case "bucket_influencers":
			if err := dec.Decode(&s.BucketInfluencers); err != nil {
				return fmt.Errorf("%s | %w", "BucketInfluencers", err)
			}

		case "bucket_span":
			if err := dec.Decode(&s.BucketSpan); err != nil {
				return fmt.Errorf("%s | %w", "BucketSpan", err)
			}

		case "event_count":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "EventCount", err)
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseInt(v, 10, 64)
				if err != nil {
					return fmt.Errorf("%s | %w", "EventCount", err)
				}
				s.EventCount = value
			case float64:
				s.EventCount = int64(v)
			}

		case "initial_anomaly_score":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "InitialAnomalyScore", err)
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseFloat(v, 64)
				if err != nil {
					return fmt.Errorf("%s | %w", "InitialAnomalyScore", err)
				}
				s.InitialAnomalyScore = Float64(value)
			case float64:
				s.InitialAnomalyScore = Float64(v)
			}

		case "is_interim":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "IsInterim", err)
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseBool(v)
				if err != nil {
					return fmt.Errorf("%s | %w", "IsInterim", err)
				}
				s.IsInterim = value
			case bool:
				s.IsInterim = v
			}

		case "job_id":
			if err := dec.Decode(&s.JobId); err != nil {
				return fmt.Errorf("%s | %w", "JobId", err)
			}

		case "processing_time_ms":
			if err := dec.Decode(&s.ProcessingTimeMs); err != nil {
				return fmt.Errorf("%s | %w", "ProcessingTimeMs", err)
			}

		case "result_type":
			var tmp json.RawMessage
			if err := dec.Decode(&tmp); err != nil {
				return fmt.Errorf("%s | %w", "ResultType", err)
			}
			// The raw message is normally a quoted JSON string; fall back to
			// the raw bytes if unquoting fails (e.g. a non-string value).
			o := string(tmp)
			if unquoted, uerr := strconv.Unquote(o); uerr == nil {
				o = unquoted
			}
			s.ResultType = o

		case "timestamp":
			if err := dec.Decode(&s.Timestamp); err != nil {
				return fmt.Errorf("%s | %w", "Timestamp", err)
			}

		case "timestamp_string":
			if err := dec.Decode(&s.TimestampString); err != nil {
				return fmt.Errorf("%s | %w", "TimestampString", err)
			}

		}
	}
	return nil
}
// NewBucketSummary returns a BucketSummary.
func NewBucketSummary() *BucketSummary {
	return &BucketSummary{}
}